| column | type | lengths / values |
|---|---|---|
| query | string | lengths 9 to 3.4k |
| document | string | lengths 9 to 87.4k |
| metadata | dict | |
| negatives | sequence | lengths 4 to 101 |
| negative_scores | sequence | lengths 4 to 101 |
| document_score | string | lengths 3 to 10 |
| document_rank | string (categorical) | 102 distinct values |
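The rows below are sample records following this schema. Assuming the data is published as a Hugging Face dataset (the repository id in the sketch is a placeholder, not the real path), a minimal loading example:

```python
from datasets import load_dataset

# "org/code-retrieval-triplets" is a hypothetical repository id; substitute
# the actual dataset path. Each record pairs a docstring-style query with a
# positive code document and a list of scored hard negatives.
ds = load_dataset("org/code-retrieval-triplets", split="train")

row = ds[0]
print(row["query"])           # e.g. "set is mutable and dangerous."
print(row["document"])        # the positive code snippet
print(len(row["negatives"]))  # 4 to 101 negatives per record
print(row["document_score"], row["document_rank"])
```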
set is mutable and dangerous.
```python
def function1(value={1}):
    print(value)
```
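This first record flags Python's mutable-default-argument pitfall: the `{1}` default is evaluated once, when the `def` statement runs, so every call shares the same set object. A minimal sketch of the failure and of the usual `None`-sentinel fix (function names here are illustrative, not from the dataset):

```python
def add_item(item, bucket={1}):   # anti-pattern: one shared set for all calls
    bucket.add(item)
    return bucket

print(add_item(2))  # {1, 2}
print(add_item(3))  # {1, 2, 3}; state leaked from the previous call

def add_item_safe(item, bucket=None):  # idiomatic fix: sentinel default
    if bucket is None:
        bucket = {1}
    bucket.add(item)
    return bucket

print(add_item_safe(2))  # {1, 2}
print(add_item_safe(3))  # {1, 3}; each call builds a fresh set
```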
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set():", "def set():\n pass", "def Set(self) -> None:", "def __set__(self, obj, value):\r\n pass", "def set(x):\n pass", "def set(self, key, value):", "def set(self, key, value):", "def set(self, U):\n pass", "def set(self, U):\n pass", "def f_set(self, data):\n raise NotImplementedError(\"Should have implemented this.\")", "def set(self, **kwargs):\n raise NotImplementedError", "def set(self, key: t.Hashable, value: t.Any) -> None:", "def _call_set(vecObj, val):\n res = vecObj.set(val)\n return res", "def __setitem__(self, key, val):\n self.set[key] = val", "def set(self, obj, value):\n pass", "def _set(self, key, value):\n self._data[key] = value\n return self._data[key]", "def __setitem__(self):\n raise ValueError(\"Dataset objects are immutable\")", "def __set__(self, instance, val):\n raise AttributeError(\"Can't set attribute\")", "def set(self, name, value):\n pass", "def test_setitem(self):\n\n vec = Vec3(4, 5, 6)\n\n # Set the values with .set()\n result = vec.set(7, 8, 9)\n\n # Ensure the values got set.\n self.assertEqual(Vec3(7, 8, 9), vec)\n # Ensure the object was also returned\n self.assertIs(result, vec)", "def set(self, obj, value):\n raise NotImplementedError", "def set(self, key, value):\n raise NotImplementedError", "def __setitem__(key, value):", "def __set__(self,obj,val):\n self._check_bounds(val)\n super(List,self).__set__(obj,val)", "def set(self, item, value):\r\n raise NotImplementedError", "def __setitem__(self, key, value):\n pass", "def __setitem__(self, key, value):\n pass", "def __setitem__(self, key, value):\n pass", "def test_set_moe_rewrite(self):\n storage = Storage()\n storage.set('1', 1, 5)\n storage.set('1', 2)\n self.assertEqual(False, '1' in storage._moe_dict, \"Moe for key '1' should be reset.\")", "def __setitem__(self, key, value):\n self.set(key, value)", "def __setitem__(self, key, value):\n self.set(key, value)", "def __setitem__(self, key, value):\n self.set(key, value)", "def _set(self, value, idx):\n if self.array is None:\n \"\"\"return false when the array is None\"\"\"\n return False\n len_array = self._len()\n if idx is not int:\n \"\"\"when the index is not an integer\"\"\"\n raise TypeError(f\"[idx] : {idx} should be an integer\")\n if value is not int:\n \"\"\" when the user enter a value that is not an integer\"\"\"\n raise TypeError(f\"[value] : {value} should be an integer\")\n if idx < len_array and idx >= 0:\n \"\"\"when the user what to modify at an index\"\"\"\n self.array[idx] = value\n else:\n \"\"\"index error. Raise ValueError\"\"\"\n raise ValueError(f\"[index]: {idx} out of range\")", "def set_once(setter):\n set_instances = DescDict()\n @wraps(setter)\n def __set__(desc, instance, value):\n if instance in set_instances:\n raise AttributeError(\"Cannot set a read-only attribute\")\n else:\n set_instances[instance] = True\n setter(desc, instance, value)\n return __set__", "def __setitem__(self, key, value):", "def set(self, value):\n if value == self.value:\n return False\n self.value = value\n return True", "def _checked_set(self, struct, field, value):\n setattr(struct, field, value)\n self._check_field_length(struct.DESCRIPTOR.fields_by_name[field], value)", "def list_set(s_list, i, val):\n require_type(isa(s_list,List), 'parameters of list-set! 
must be a list')\n s_list[i] = val\n return None", "def __setitem__(name, other):", "def test_set_with_get(self):\n storage = Storage()\n storage.set('1', 1)\n self.assertEqual(1, storage.set('1', 2, get=True), \"Should return previous value\")\n self.assertEqual(2, storage.get('1'), 'Should get new value')\n self.assertEqual(None, storage.set('2', 1, get=True), \"Should return None as there was no key '2'\")", "def __setitem__(self, name, value):\r\n return self.set(name=value)", "def _single_setitem(self, key, item):\n getattr(self._cpp_obj, self._setter)(key, item)", "def __setitem__(self, key, value):\n if not self._is_valid(value):\n value = self._fix_value(value)\n self._inner.__setitem__(key, value)", "def __setitem__(self,key,value):\n if self._extract:\n raise RuntimeError('This archive is read-only!')\n else:\n self._setitem(key,value)", "def _set(self, driver: AbstractHasFeatures, value: Any):\n with driver.lock:\n set_chain(self, driver, value)", "def test_setitem(self):\n\n vec = Vec3(4, 5, 6)\n\n # Set the values with __setitem__\n vec[0] = 14\n vec[1] = 15\n vec[2] = 16\n\n # Ensure the values got set.\n self.assertEqual(14, vec[0])\n self.assertEqual(15, vec[1])\n self.assertEqual(16, vec[2])", "def testMutable(self):\n alist = [1,2,3]\n aref = alist\n alist[1] = \"stasi\"\n self.assertEqual([1,\"stasi\",3],aref)", "def pre_set(self, value):\r\n return value", "def __setitem__(self, item, val):\r\n item.set_value(val, borrow=True)", "def __init__(self):\n self.set = set()", "def testPartialAndIncorrectSetter(self):\n _1 = [ (self.kl[0], 1), \n (self.kl[1], 1), \n (Customer, 1), ] * 2\n self.o.population = _1\n _2 = self.o.population\n self.assertEqual(len(_2), 3, \"got {}\".format(_2))\n _expect = set([(self.kl[0], 2),\n (self.kl[1], 2),\n (getattr(tp, \"RandConso\"), 6)])\n self.assertEqual(_2, _expect, \"something odd\")", "def testContainOnlyImmutables(self):\n aset = set()\n \n aset.add(1)\n aset.add(\"cheka\")\n \n # non-hashable object (that is mutable) objects cannot be contained in set\n self.assertRaises(TypeError, lambda : aset.add([]) )", "def _set_value(o, d):\n if isinstance(o, Param) and not o._mutable:\n return # ignore requests to set immutable params\n else:\n try:\n o.value = d\n except AttributeError:\n o = d # this would be an indexed parameter", "def set(self, name, value, **kwargs):\r\n if kwargs:\r\n if 'getset' in kwargs:\r\n warnings.warn(DeprecationWarning(\r\n \"getset option to 'set' is deprecated, \"\r\n \"use Redis.getset() instead\"))\r\n if kwargs['getset']:\r\n return self.getset(name, value)\r\n if 'preserve' in kwargs:\r\n warnings.warn(DeprecationWarning(\r\n \"preserve option to 'set' is deprecated, \"\r\n \"use Redis.setnx() instead\"))\r\n if kwargs['preserve']:\r\n return self.setnx(name, value)\r\n return self.format_bulk('SET', name, value)", "def setUniformValue(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads\n pass", "def set_py(self, value):\n pass", "def __set__(self, obj, value):\n\n return setattr(obj, '_' + self.name, value)", "def test_set_v3(self):\n\n test_vec = Vec3(1, 3, 5)\n test_vec.set(2, 4, 6)\n\n self.assertEqual(test_vec, Vec3(2, 4, 6))", "def __setitem__(self, key, value):\n if key not in self.fields:\n raise KeyError(key)\n return setattr(self, key, value)", "def test_slice_setslice_forbidden(self):\n global setVal\n class foo:\n def __setslice__(self, i, j, value):\n global setVal\n setVal = i, j, value\n def __setitem__(self, index, value):\n global setVal\n setVal = 
index, value\n\n foo()[::] = 23\n self.assertEqual(setVal, (slice(None, None, None), 23))\n foo()[::None] = 23\n self.assertEqual(setVal, (slice(None, None, None), 23))", "def testExceedingSetter(self):\n _1 = [ (self.kl[0], 3),\n (self.kl[1], 3),\n (Customer, 1), ] * 2\n self.o.population = _1\n _2 = self.o.population\n self.assertEqual(len(_2), 2, \"got {}\".format(_2))\n _expect = set([(self.kl[1], 4), (self.kl[0], 6), ])\n self.assertEqual(_2, _expect, \"something odd\")", "def set(self, key, value):\r\n self.set_many({key: value})", "def __set__(self, obj, val):\n msg = 'Cannot set the value of a Signal. Signals are read-only.'\n raise AttributeError(msg)", "def set(self, index, data):\n self.data[index] = data", "def __setitem__(self, *args, **kwargs): # real signature unknown\n pass", "def __setitem__(self, *args, **kwargs): # real signature unknown\n pass", "def __setitem__(self, *args, **kwargs): # real signature unknown\n pass", "def __setitem__(self, *args, **kwargs): # real signature unknown\n pass", "def test_convert_setter():\n foo = Value()\n foo.scalars = 1.2\n assert foo.scalars[0].value == 1.2", "def __set__(self, stack: \"stack.Stack\", value: Any):\n with self._lock:\n self.assign_value_to_stack(stack, value)", "def _setValue( self, client, value ):\n\t\treturn client.setValue( self.schema, value )", "def __setitem__(self, key, value):\n if '.' in key:\n self.assign(key, value)\n else:\n setattr(self, key, value)", "def __setitem__(self, attribute_name, value):\n pass # pragma: no cover", "def __setitem__(self, key, value):\n if not self._set:\n raise TypeError('This dict is read-only')\n self._set(key, value)", "def forced(setter):\n @wraps(setter)\n def __set__(desc, instance, value, forced=False):\n if forced:\n return setter(desc, instance, value)\n else:\n raise AttributeError(\"Cannot set a read-only attribute\")\n return __set__", "def __setitem__(self, key, value):\n self.set_attribute(key, value)", "def __setitem__(self, index, value):\n assert(isinstance(index,int)), \"Index should be an integer value\"\n assert(0 <= index < len(self.set)), \" Index out of bounds\"\n assert (isinstance(index, Fund)), \"Value should be a fund\"\n assert (value.is_valid()), \"Fund should contain data\"\n self.set[index] = value", "def __set__(self, instance, value):\r\n if instance:\r\n return instance._values[self.column.column_name].setval(value)\r\n else:\r\n raise AttributeError('cannot reassign column values')", "def _setVals(self, *args, **kwargs):\n pass", "def set(self):\n\n raise Exception(\"Can't set frmt.\")", "def test_oldclass_and_direct_set(self):\n global setVal\n class OldStyle:\n def __setitem__(self, index, value):\n global setVal\n setVal = index, value\n\n class OldStyleWithLen:\n def __setitem__(self, index, value):\n global setVal\n setVal = index, value\n def __len__(self):\n return 10\n\n class NewStyle(object):\n def __setitem__(self, index, value):\n global setVal\n setVal = index, value\n\n class OldStyleWithLenAndGetSlice:\n def __setitem__(self, index, value):\n global setVal\n setVal = index, value\n def __len__(self):\n return 10\n def __setslice__(self, start, stop, value):\n global setVal\n setVal = start, stop, value\n\n # slice object should pass through unmodified if constructed explicitly.\n NewStyle()[slice(None, -1, None)] = 123\n self.assertEqual(setVal, (slice(None, -1, None), 123))\n OldStyleWithLen()[slice(None, -1, None)] = 123\n self.assertEqual(setVal, (slice(None, -1, None), 123))\n OldStyle()[slice(None, -1, None)] = 123\n 
self.assertEqual(setVal, (slice(None, -1, None), 123))\n OldStyleWithLenAndGetSlice()[slice(None, -1, None)] = 123\n self.assertEqual(setVal, (slice(None, -1, None), 123))\n\n # using the slice syntax\n NewStyle()[:-1] = 123\n self.assertEqual(setVal, (slice(None, -1, None), 123))\n OldStyleWithLen()[:-1] = 123\n self.assertEqual(setVal, (slice(None, -1, None), 123))\n OldStyleWithLenAndGetSlice()[:-1] = 123\n self.assertEqual(setVal, (slice(None, -1), 123))\n OldStyle()[:-1:1] = 123\n self.assertEqual(setVal, (slice(None, -1, 1), 123))\n OldStyle()[:-1] = 123\n self.assertEqual(setVal, (slice(-1), 123))\n OldStyle()[-1:] = 123\n self.assertEqual(setVal, (slice(-1, None), 123))\n OldStyle()[:-1:None] = 123\n self.assertEqual(setVal, (slice(None, -1, None), 123))\n OldStyle()[-1::None] = 123\n self.assertEqual(setVal, (slice(-1, None, None), 123))\n OldStyle()[:-1:] = 123\n self.assertEqual(setVal, (slice(None, -1, None), 123))\n OldStyle()[-1::] = 123\n self.assertEqual(setVal, (slice(-1, None, None), 123))", "def __setitem__(self, name, obj):", "def test_set_passed_as_iterable():\n tree = Tree([10, 5, 100])\n assert tree.root.value == 10\n assert tree.root.left.value == 5\n assert tree.root.right.value == 100", "def dal_set(key, obj):\n global store\n return store.set(urllib.quote(key), obj)", "def set_value (self):\n raise NotImplementedError", "def put(self):\n pass", "def put(self):\n pass", "def __setitem__(self, key, value):\n self.put(key, value)", "def __setitem__(self, key, value):\n self.put(key, value)", "def __setitem__(self, key, value):\n if key in self.define:\n warnings.warn('Key {} is being overwritten to {}. It had a value of {}. Hope you know what you are doing.'.format(key, value, self.define[key]))\n self.define[key] = value", "def set(self, value):\n if value is None:\n self.value = [] if self.list else None\n else:\n value = self.cast(value)\n if self.list:\n self.value.append(value)\n else:\n self.value = value", "def put(self):\n self._val = True", "def __set__(self, instance, value):\n instance._values[self.name] = self.process(value)", "def set(key, value):\n return Cache.cache_connector.set(key, value)", "def __setattr__ (self, attr, value):\n self.set_value (attr, value)", "def __setitem__(self, key, value):\n if isinstance(key, (list, tuple)):\n self.assign_block(key, value)\n else:\n self.assign_value(key, value)", "def setLocal(name, value):", "def __init__(self, value=None):\n self.set(value)", "def __setitem__(self, k, v):\n\t\treturn setattr(self, k, v)", "def is_set(obj):\n return type(obj) is set", "def fset(self, value):\n message = \"Overriding a constant value is an illegal operation: {0} = {1}.\".format(\n name.__name__,\n value)\n raise TypeError(message)" ]
[ "0.837998", "0.8050189", "0.76336277", "0.7309809", "0.7295644", "0.7043105", "0.7043105", "0.7029955", "0.7029955", "0.68462026", "0.6810316", "0.6722299", "0.66905844", "0.6632549", "0.6594598", "0.6581449", "0.6524981", "0.63941556", "0.6374605", "0.6370601", "0.63417166", "0.6326663", "0.63190424", "0.6284263", "0.6279554", "0.62190026", "0.62190026", "0.62190026", "0.6163694", "0.6157133", "0.6157133", "0.6157133", "0.61466736", "0.61421424", "0.6121225", "0.6094558", "0.6059322", "0.6058187", "0.60435385", "0.60357034", "0.6035425", "0.6014776", "0.60035706", "0.5999947", "0.5990154", "0.5987226", "0.5985861", "0.5980716", "0.59541005", "0.5951765", "0.59265774", "0.5915754", "0.59128094", "0.59001297", "0.5888089", "0.5887787", "0.5883484", "0.5878398", "0.5874048", "0.58668953", "0.5862347", "0.58578223", "0.58573693", "0.5850523", "0.5847509", "0.5847509", "0.5847509", "0.5847509", "0.58429843", "0.5833106", "0.5830183", "0.58277", "0.5817784", "0.58147436", "0.581404", "0.58028376", "0.5797805", "0.5790202", "0.5787915", "0.57773906", "0.57753634", "0.57704127", "0.5765424", "0.5765113", "0.57645077", "0.575966", "0.575966", "0.5755816", "0.5755816", "0.57515186", "0.5750328", "0.5741857", "0.57413673", "0.57360756", "0.5728397", "0.5727943", "0.5725172", "0.57189125", "0.57113016", "0.57106274", "0.5705081" ]
0.0
-1
Escape value for comma-separated list
```python
def list_escape(s):
    return re.sub(r'[\\,]', _escape_char, s)
```
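`list_escape` delegates to a helper `_escape_char` that the record does not show. To make the snippet runnable, the sketch below assumes a stand-in that backslash-prefixes the matched character; treat it as an assumption, not the original helper:

```python
import re

def _escape_char(m):
    # Assumed stand-in for the unshown helper: prefix the match with '\'.
    return '\\' + m.group(0)

def list_escape(s):
    return re.sub(r'[\\,]', _escape_char, s)

print(list_escape('a,b'))                        # a\,b
print(','.join(map(list_escape, ['x,y', 'z'])))  # x\,y,z
```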
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def quote_list(the_list):\n return [\"'%s'\" % element for element in the_list]", "def _format_list_for_query(input_list):\n return (\n \", \".join(input_list).replace(\" \", \"\").replace(\"'\", \"\").replace(\",\", \"%2C\")\n )", "def escape_value(value: OptionValueType) -> OptionValueType:\n if isinstance(value, str):\n return shlex.quote(value)\n elif isinstance(value, Sequence):\n return [shlex.quote(v) for v in value]\n else:\n return value", "def _escapeString(self, value):\n if '\"' in value and \"'\" in value:\n substrings = value.split(\"\\\"\")\n result = [\"concat(\"]\n for substring in substrings:\n result.append(\"\\\"%s\\\"\" % substring)\n result.append(\", '\\\"', \")\n result = result[0:-1]\n if value.endswith('\"'):\n result.append(\", '\\\"'\")\n return \"\".join(result) + \")\"\n\n if '\"' in value:\n return \"'%s'\" % value\n return \"\\\"%s\\\"\" % value", "def escape(value):\n if value == \"*\":\n return value\n elif isinstance(value, str):\n return \"'{}'\".format(value.replace(\"'\", \"``\"))\n elif isinstance(value, bool):\n return \"TRUE\" if value else \"FALSE\"\n elif isinstance(value, (int, float)):\n return value\n elif isinstance(value, (list, tuple)):\n return \", \".join(escape(element) for element in value)", "def add_quote(item):\n if type(item) == str:\n return \"\\'\" + item + \"\\'\"\n else:\n return item", "def escape_list(l):\n return [_escape_harlowe_html(item) if isinstance(item, text_type) else str(item) for item in l]", "def _quote(v):\n return '\"' + v + '\"' if ' ' in v else v", "def surround(inp):\r\n if inp is list:\r\n for i in range(len(inp)):\r\n inp[i] = \"'\"+str(inp[i])+\"'\"\r\n return inp\r\n return \"'\"+str(inp)+\"'\"", "def escape_quotes(self, val):\n if val.startswith(self.quote) and val.endswith(self.quote):\n # make sure any previously escaped quotes are not re-escaped\n middle = val[1:-1].replace(\"\\\\\" + self.quote, self.quote)\n middle = middle.replace(self.quote, \"\\\\\" + self.quote)\n val = \"%s%s%s\" % (self.quote, middle, self.quote)\n\n return val", "def list_sugar(self):\n return 'list(', ')'", "def quoted(val: str) -> str:\n return f'\"{val}\"' if ' ' in val else val", "def _format_list(param_list: Iterable[Any]):\n fmt_list = []\n for item in param_list:\n if isinstance(item, str):\n fmt_list.append(f\"'{item}'\")\n else:\n fmt_list.append(f\"{item}\")\n return \",\".join(fmt_list)", "def escape_list(mylist, escape_func):\n def escape(obj, escape_func=escape_func):\n try:\n e = obj.escape\n except AttributeError:\n return obj\n else:\n return e(escape_func)\n return list(map(escape, mylist))", "def embeded_triple_quotes():\n pass", "def addslashes(val):\n return re.escape(val)", "def _escape_identifier(self, value):\n\n return value.replace('\"', '\"\"')", "def quote(value):\n return DoubleQuotedScalarString(value)", "def show_list(self, desc, lst, writeln):\n if not lst:\n return\n val = ', '.join([list_escape(v) for v in lst])\n writeln(\"%s: %s\" % (desc, val))", "def human_list(lst, connector='and'):\n # we don't want to listify non iterables\n if not getattr(lst, '__iter__', False):\n return lst\n else:\n s = ''\n max_idx = len(lst) - 1\n for i, item in enumerate(lst):\n if i == 0:\n t = '%s'\n elif i == max_idx and max_idx > 1:\n t = ', ' + connector + ' %s'\n elif i == max_idx and max_idx == 1:\n t = ' ' + connector + ' %s'\n else:\n t = ', %s'\n s += t % filter.conditional_escape(item)\n return mark_safe(s)", "def format_list(self, key, val, spacer):\n vals = list(map(str, val))\n\n if 
self.is_paired_list(key):\n # join the values together so each line has a pair\n vals = zip(vals[::2], vals[1::2])\n vals = [\"%s %s\" % (v[0], v[1]) for v in vals]\n \n s = self.newlinechar.join([spacer + v for v in vals])\n\n return s", "def escape(x):\n if '\\'' not in x:\n return '\\'' + x + '\\''\n s = '\"'\n for c in x:\n if c in '\\\\$\"`':\n s = s + '\\\\'\n s = s + c\n s = s + '\"'\n return s", "def replace_special_characters_in_list(self, full_list):\n return [n.replace(':','%3A') for n in full_list]", "def create_list_string(list_):\n return f\"[{' '.join(list_)}]\"", "def get_prep_value(self, value):\n if isinstance(value, list):\n return json.dumps([str(d) for d in value])\n\n return value", "def tokenlist(sep, item):\n return item + ZeroOrMore(sep + item) + Optional(sep)", "def __str__ (self):\n return f'\"{self.value[0]}|{self.value[1]}\"'", "def elem_quote(member):\n# member = str(member) # since we now stringify everything - this is probably a redundant command\n if member.find(\"'\") == -1:\n outline = \"'\" + member + \"'\"\n elif member.find('\"') == -1:\n outline = '\"' + member + '\"'\n else:\n outline = '\"' + member.replace('\"','&mjf-quot;')+'\"'\n return outline.replace('\\n','&mjf-lf;')", "def format_list(self, at_char, user, list_name):\r\n return u'<a href=\"http://%s/%s/%s\" data-list=\"\">%s%s/%s</a>' \\\r\n % (self.domain, user, list_name, at_char, user, list_name)", "def escape_jira_strings(v):\n if type(v) is str:\n return v.replace(r\"{\", r\"\\{\").replace(r\"[\", r\"\\[\")\n if type(v) is list:\n return [escape_jira_strings(x) for x in v]\n return escape_jira_strings(str(v))", "def encode_list(L):\n return \"&\".join([\"%s=%s\" % (index, element) for index, element in enumerate(L)])", "def quote_escape(value, lf='&mjf-lf;', quot='&mjf-quot;'):\n if '\\n' in value:\n value = value.replace('\\n', lf)\n if '\\'' in value and '\\\"' in value:\n value = value.replace('\"', quot)\n return value", "def argument_list_quote(arguments):\n args = []\n for arg in arguments:\n args.append(argument_quote(arg))\n return '\"%s\"' % ' '.join(args)", "def ps_filter(val):\n if isinstance(val, Undefined):\n return UNDEFINED_LABEL\n escaped = []\n for char in str(val):\n if char in \"`$#'\\\"\":\n char = \"`\" + char\n elif char == '\\0':\n char = \"`0\"\n elif char == '\\a':\n char = \"`a\"\n elif char == '\\b':\n char = \"`b\"\n elif char == '\\f':\n char = \"`f\"\n elif char == '\\n':\n char = \"`n\"\n elif char == '\\r':\n char = \"`r\"\n elif char == '\\t':\n char = \"`t\"\n elif char == '\\v':\n char = \"`v\"\n escaped.append(char)\n return ''.join(escaped)", "def _quote(self, arg):\n arg = arg.replace('\\\\', '\\\\\\\\')\n arg = arg.replace('\"', '\\\\\"')\n return '\"%s\"' % arg", "def format_list(my_list):\r\n\treturn \", \".join(my_list[::2]) + (\" and \" + my_list[-1])", "def ListToStr(val):\n return ''.join(['%c' % c for c in val])", "def shquote(arg):\n for c in '\"', \"'\", \"\\\\\", \"#\":\n if c in arg:\n return repr(arg)\n if arg.split() != [arg]:\n return repr(arg)\n return arg", "def paranthesis_list(output_name, input_var=_db_name):\n return '(' + delimitedList(input_var).setResultsName(output_name) + ')'", "def escape(self, value):\n return re.sub(r\"\\$\", \"$$\", value)", "def _escape(strings):\n ret = []\n for string in strings:\n if string == '[' or string == ']' or string == \"\\\"\":\n string = '\\\\' + string\n ret.append(string)\n return \"\".join(ret)", "def csv_quote_escape(self, the_string):\n the_string = the_string.replace('\"', 
r'\"\"')\n\n the_string = '\"' + the_string + '\"'\n\n return the_string", "def addpoemslashes(value):\n return value.replace(\"\\r\", \"\").replace(\"\\n\", ' / ')", "def sh_filter(val):\n if isinstance(val, Undefined):\n return UNDEFINED_LABEL\n escaped = []\n for char in str(val):\n if char in \"$#\\\"\":\n char = \"\\\\\" + char\n elif ord(char) < 32 or ord(char) > 126:\n char = \"\\\\%03o\" % ord(char)\n escaped.append(char)\n return ''.join(escaped)", "def from_list(l):\n if isinstance(l, str):\n for special_char in (' ', '\\n', '\\t', '(', ')', '\\\"'):\n if special_char in l:\n return '\\\"' + l + '\\\"'\n return l\n return '(' + ' '.join(from_list(e) for e in l) + ')'", "def list_string(join_list):\n joined_list = '[{}]'.format(join_list, join_list)\n return joined_list", "def paste(List, sep=''):\n strCombn =str()\n for i in range(len(List)):\n temp= f'{List[i]}'\n if (i==0):\n strCombn= temp\n else:\n strCombn = f'{strCombn}{sep}{temp}'\n \n return strCombn", "def html_quote(v):\n if v is None:\n return ''\n return cgi.escape(str(v), 1)", "def quote_path(path):\n return \"[%s]\" % \",\".join(\"'%s'\" % p for p in path)", "def shQuote(text):\n\treturn \"'%s'\" % text.replace(\"'\", r\"'\\''\")", "def daqStringMod(self, arg):\n\t\tself.stuff = []\n\t\tfor i in arg:\n\t\t\tself.stuff.append(\"\\'\" + i + \"\\'\")\n\t\treturn self.stuff", "def escape(text):\n if isinstance(text, list):\n for i, t in enumerate(text):\n t = t.replace(r'\\&', r'&amp;')\n t = t.replace(r'<', r'&lt;')\n t = t.replace(r'>', r'&gt;')\n text[i] = t\n else:\n text = text.replace(r'\\&', r'&amp;')\n text = text.replace(r'<', r'&lt;')\n text = text.replace(r'>', r'&gt;')\n return text", "def format_value(content):\n try:\n content += \"\"\n except TypeError:\n content = \", \".join(content)\n return remove_break_lines_characters(\n content).strip().replace('^', PRESERVECIRC)", "def make_safe(value, delimiter):\n if isinstance(value,list):\n return delimiter.join(map(lambda x: make_safe(x, delimiter),value))\n return str(value)", "def sqllist(lst):\n if isinstance(lst, basestring): \n return lst\n else:\n return ', '.join(lst)", "def _list_to_printable(value):\n fixed_items = []\n for item in value:\n if type(item) in (int, long, float):\n fixed_items.append(str(item))\n elif item == None:\n fixed_items.append(\"NULL\")\n elif type(item) == unicode:\n fixed_items.append(\"'%s'\" % item.replace(\"'\", \"''\"))\n elif type(item) == str:\n fixed_items.append(\"'%s'\" % str_to_unicode(item.replace(\"'\", \"''\")))\n else:\n raise Exception, \"Unsupported type '%s' given to _list_to_printable\" % type(item)\n\n return '(' + ','.join(fixed_items) + ')'", "def list(self, arg: SeField[Any]) -> str:\n if is_bare_list(arg.type):\n return arg.varname\n else:\n earg = arg[0]\n earg.name = \"v\"\n return f\"[{self.render(earg)} for v in {arg.varname}]\"", "def _list_to_printable(value):\n fixed_items = []\n for item in value:\n if isinstance(item, (int, float)):\n fixed_items.append(str(item))\n elif item == None:\n fixed_items.append(\"NULL\")\n elif isinstance(item, UNICODE_TYPE):\n fixed_items.append(\"'%s'\" % item.replace(\"'\", \"''\"))\n elif isinstance(item, BYTES_TYPE):\n fixed_items.append(\"'%s'\" % tostr(item.replace(\"'\", \"''\")))\n else:\n raise Exception(\"Unsupported type '%s' given to _list_to_printable\" % type(item))\n\n return '(' + ','.join(fixed_items) + ')'", "def _escapeArg(arg):\n #XXX There is a *lot* more that we should escape here.\n return arg.replace('\"', r'\\\"')", "def 
standardise_quotes(self, val):\n if val.startswith(self.altquote) and val.endswith(self.altquote):\n middle = val[1:-1]\n val = \"%s%s%s\" % (self.quote, middle, self.quote)\n\n val = self.escape_quotes(val)\n\n return val", "def get_prep_value(self, value):\n if isinstance(value, tuple) and len(value) == 2:\n return '[%s,%s]' % value\n return value", "def sqlquote(a):\n if isinstance(a, list):\n return _sqllist(a)\n else:\n return sqlparam(a).sqlquery()", "def quote_item(item, pre='', post=''):\n post = post or pre\n return f'{pre}{item}{post}'", "def escapeSolrArg(term):\n\tterm = term.replace('\\\\', r'\\\\') # escape \\ first\n\treturn \"\".join([nextStr for nextStr in escapedSeq(term)])", "def escaped(array):\n\n return list(map(re.escape, array))", "def escape_values(bfo):\n return 0", "def escape_values(bfo):\n return 0", "def escape_values(bfo):\n return 0", "def quote(value, *args, **kwargs):\n return parse.quote(encode(value, *args, **kwargs))", "def list2cmdline(seq):\n\n result = []\n needquote = False\n for arg in seq:\n bs_buf = []\n\n # Add a space to separate this argument from the others\n if result:\n result.append(' ')\n\n needquote = (\" \" in arg) or (\"\\t\" in arg) or (not arg) or (\"(\" in arg) or (\")\" in arg)\n if needquote:\n result.append('\"')\n\n for c in arg:\n if c == '\\\\':\n # Don't know if we need to double yet.\n bs_buf.append(c)\n elif c == '\"':\n # Double backslashes.\n result.append('\\\\' * len(bs_buf) * 2)\n bs_buf = []\n result.append('\\\\\"')\n else:\n # Normal char\n if bs_buf:\n result.extend(bs_buf)\n bs_buf = []\n result.append(c)\n\n # Add remaining backslashes, if any.\n if bs_buf:\n result.extend(bs_buf)\n\n if needquote:\n result.extend(bs_buf)\n result.append('\"')\n\n return ''.join(result)", "def _format_item_list(items, pad=\"'\", sep=', ', end_sep=' and '):\n result = ''\n items = [pad + item + pad for item in items]\n if items:\n if len(items) != 1:\n result = sep.join(items[:-1]) + end_sep + items[-1]\n else:\n result = items[0]\n return result", "def escape(self):\n pass", "def quote(m):\n return '\"' + m + '\"'", "def test_symlit_escape():\n return \"\\\"=\\\"\"", "def _value_needs_quotes(val):\n if not val:\n return None\n val = \"\".join(str(node) for node in val.filter_text(recursive=False))\n if not any(char.isspace() for char in val):\n return None\n if \"'\" in val and '\"' not in val:\n return '\"'\n if '\"' in val and \"'\" not in val:\n return \"'\"\n return \"\\\"'\" # Either acceptable, \" preferred over '", "def _fmt_list(self, string_list):\n return self._fmt_csv(string_list, list_braces=\"[]\")", "def shellify(val):\n\n if val==None:\n s=''\n elif not isinstance(val,str):\n s=str(val)\n else:\n return shlex.quote(val)\n return shlex.quote(s)", "def fmt(self, val):\n if type(val) in self.QUOTABLE_TYPES:\n s = decode_string(val)\n return u\"{0}{1}{2}\".format(self.quotechar, s, self.quotechar)\n else:\n return decode_string(str(val))", "def format(lis):\n if lis:\n return \";\".join(\",\".join(str(i) for i in n) for n in lis)\n else:\n return \"NULL\"", "def print_avec_separateur(self, separateur=\" \"):\r\n print(\"[\", end=\" \")\r\n if self.first() is not None:\r\n self.first().print_list_avec_separateur(separateur)\r\n print(\"]\")", "def get_quoted_cs_string(self, key, default=None):\n quoted_list = self.get_quoted_list(key, default)\n quoted_cs_string = ','.join(map(str, quoted_list))\n\n return quoted_cs_string", "def process_list(a_list: list):\n\n return ', '.join(str(s) for s in a_list) if a_list else 
Presenter.DEFAULT", "def test_escape_value_as_list(self):\r\n testdict = escapeddict.EscapedDict({'key1': 'value1', 'key2': ['value2', '${key1}']})\r\n for key in testdict.keys():\r\n print testdict[key]\r\n assert testdict['key1'] == 'value1'\r\n assert testdict['key2'] == ['value2', 'value1']", "def quote(value):\n single = value.find(\"'\")\n double = value.find('\"')\n multiline = value.find('\\n') != -1\n if multiline or ((single != -1) and (double != -1)):\n if value.find('\"\"\"') == -1 and value[0] != '\"' and value[-1] != '\"':\n s = '\"\"\"%s\"\"\"' % value\n else:\n s = \"'''%s'''\" % value\n elif (single != -1) and (double == -1):\n s = '\"%s\"' % value\n else:\n s = \"'%s'\" % value\n return s", "def _fmt_csv(string_list, list_braces = None):\n if len(string_list) == 0:\n return \"\"\n first = True\n str_ = \"\"\n if list_braces != None:\n str_ += list_braces[0]\n for string in string_list:\n if string != None:\n if first:\n first = False\n else:\n str_ += \", \"\n str_ += string\n if list_braces != None:\n str_ += list_braces[1]\n return str_", "def json_escape(context, value):\n\n return json.dumps(value).strip('\"')", "def escape(value):\n \n value = stringify(value)\n return value.replace('$', '$$')", "def string_list(out, name, items):\n print(f\"const char* const {name}[] = {{\", file=out)\n for item in items:\n print(f\" \\\"{item}\\\",\", file=out)\n print(\" nullptr,\", file=out)\n print(\"};\", file=out)\n print(\"\", file=out)\n pass", "def _quote_arguments(args):\n return map(lambda x: '\"{}\"'.format(x) if ' ' in x else '{}'.format(x), args)", "def cleaned_string(val):\r\n return urllib.quote_plus(smart_str(val))", "def quote_identifier(self, value):\n\n return self.initial_quote + self._escape_identifier(value) + self.final_quote", "def EncodePOSIXShellList(lst):\n\n encoded_arguments = []\n for argument in lst:\n encoded_arguments.append(EncodePOSIXShellArgument(argument))\n return ' '.join(encoded_arguments)", "def replace_quotes(item: str) -> str:\n # A list is used because it can be appended easily.\n final_str_arr = []\n\n for n, char in enumerate(item):\n # only operate if the previous char actually exists\n if n - 1 < 0:\n if char != '\"':\n final_str_arr.append(char)\n\n continue\n\n # Complex quoting rules!\n # If it's a SINGLE backslash, don't append it.\n # If it's a double backslash, append it.\n if char == '\\\\':\n if item[n - 1] == \"\\\\\":\n # double backslash, append it\n final_str_arr.append(char)\n\n continue\n\n if char == '\"':\n # check to see if it's escaped\n if item[n - 1] == '\\\\':\n # if the last char on final_str_arr is NOT a backslash, we want to keep it.\n if len(final_str_arr) > 0 and final_str_arr[-1] != '\\\\':\n final_str_arr.append('\"')\n\n continue\n\n # None of the above were hit, so add it anyway and continue.\n final_str_arr.append(char)\n\n return \"\".join(final_str_arr)", "def open_quote(self):\n self.message += '{'", "def _escapePaths(self, paths):\n cmd = \"\"\n for p in paths:\n if ' ' in p:\n cmd += ' \"{0}\"'.format(p)\n else:\n cmd += ' ' + p\n return cmd", "def encode_commands(command_list: List[str]) -> List[str]:\n return ['-'.join(x.split(' ')) for x in command_list]", "def test_list(self):\n self.assertValue(\n ['foo', 'bar', 'hello'],\n 'foo\\nbar\\nhello\\n')", "def quot(string):\r\n return string.replace('\"', \"'\")", "def format_value(value):\n return value.replace('|', ' ')", "def _render_list_to_string(self, alist):\n return \",\".join(self._render_row(alist))" ]
[ "0.6734296", "0.64581853", "0.6446276", "0.64394754", "0.63908434", "0.6385253", "0.63293046", "0.632233", "0.61730325", "0.59699094", "0.59477973", "0.5917379", "0.5887457", "0.5872731", "0.58519614", "0.58464056", "0.5803603", "0.58012146", "0.579517", "0.57884943", "0.57869935", "0.57720137", "0.57464397", "0.5740253", "0.5716313", "0.57115513", "0.56998414", "0.56933457", "0.5687312", "0.5686963", "0.5680976", "0.56803316", "0.56685436", "0.5655259", "0.56536543", "0.56425697", "0.5637439", "0.562655", "0.5605198", "0.559785", "0.55944073", "0.5587281", "0.5586333", "0.5581371", "0.5574677", "0.55692184", "0.5565896", "0.55624866", "0.5552008", "0.5545793", "0.55296504", "0.5515702", "0.5508817", "0.55015904", "0.5492674", "0.54773945", "0.5476041", "0.54721373", "0.54680246", "0.5439801", "0.54110515", "0.5408623", "0.539941", "0.5395261", "0.5393636", "0.53930503", "0.53930503", "0.53930503", "0.5388203", "0.538662", "0.5385108", "0.5372863", "0.5366409", "0.53513587", "0.5347492", "0.5339276", "0.533617", "0.5327326", "0.5318795", "0.5317996", "0.5311087", "0.529645", "0.5294089", "0.5283948", "0.52786314", "0.5260471", "0.5257714", "0.5248415", "0.5248185", "0.5245812", "0.5241641", "0.5241389", "0.5227535", "0.5226184", "0.52242595", "0.5214976", "0.52068126", "0.52018994", "0.52006537", "0.5196636" ]
0.68377125
0
Convert DistinguishedName dict to '/'-separated string.
```python
def render_name(name_att_list):
    res = ['']
    for k, v in name_att_list:
        v = dn_escape(v)
        res.append("%s=%s" % (k, v))
    res.append('')
    return '/'.join(res)
```
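`dn_escape` is likewise referenced but not defined in the record. Assuming a minimal escaper for the separator and the escape character (again a stand-in, not the original), the function renders an OpenSSL-style DN with leading and trailing slashes:

```python
def dn_escape(v):
    # Assumed helper: backslash-escape '\' and the '/' separator.
    return v.replace('\\', '\\\\').replace('/', '\\/')

print(render_name([('C', 'US'), ('O', 'Acme/Labs'), ('CN', 'example')]))
# /C=US/O=Acme\/Labs/CN=example/
```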
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def convert_dn(dn):\n if re.match(\"^/.*\", dn):\n return dn\n\n new_dn = \"\"\n attrs = dn.split(\",\")\n for attr in attrs:\n prm_tuple = attr.split(\"=\")\n k = prm_tuple[0].strip()\n v = prm_tuple[1].strip()\n new_dn = new_dn + f'/{k}={v}'\n return new_dn", "def Serialize(cls, dn, separator=\"/\", sort=True):\n\n if separator not in cls.VALID_SEPARATORS:\n raise X509SubjectNameConfigError(\"Invalid field separator %r\" % separator)\n\n # If using '/' then prepend DN with an initial '/' char\n if separator == \"/\":\n s_dn = separator\n else:\n s_dn = \"\"\n\n dn_list = []\n for key, val in dn.items():\n if val:\n if isinstance(val, tuple):\n kv_pairs = [\"%s=%s\" % (key, val_sub) for val_sub in val]\n dn_list += [separator.join(kv_pairs)]\n else:\n dn_list += [\"%s=%s\" % (key, val)]\n\n if sort:\n dn_list.sort()\n\n s_dn += separator.join(dn_list)\n\n return s_dn", "def domainroot(d):\n dn = str2dn(d)\n dn.pop(0)\n return dn2str(dn)", "def format_name(name_dir):\n if(name_dir.endswith('/')):\n name_dir = name_dir.rstrip('/')\n return(name_dir)", "def pathify(value):\n \n value = stringify(value)\n return value.replace('$ ', '$$ ').replace(' ', '$ ').replace(':', '$:')", "def convert_unc(host, path):\n return ''.join(['\\\\\\\\', host, '\\\\', path.replace(':', '$')])", "def drivesize_str(self):\n return re.search(r\"%s:([^,]+)\" % self.device, self.driveinfo).group(1)", "def __str__(self):\n basename = []\n for key, val in self.entities.items():\n if key not in ('prefix', 'suffix') and \\\n val is not None:\n _check_key_val(key, val)\n # convert certain keys to shorthand\n if key == 'subject':\n key = 'sub'\n if key == 'session':\n key = 'ses'\n if key == 'acquisition':\n key = 'acq'\n if key == 'processing':\n key = 'proc'\n if key == 'recording':\n key = 'rec'\n basename.append('%s-%s' % (key, val))\n\n if self.suffix is not None:\n basename.append(self.suffix)\n\n basename = '_'.join(basename)\n if self.prefix is not None:\n basename = op.join(self.prefix, basename)\n\n return basename", "def compose(cls, decomposed_dict):\n composed = ''\n\n def get(key):\n return decomposed_dict.get(key, '').strip()\n\n name = get('name')\n\n if name:\n composed = '%s%s=' % (composed, name)\n\n source = get('source')\n composed = '%s%s' % (composed, source)\n\n target = get('target')\n if not cls.is_wildcard(target):\n composed = '%s#%s' % (composed, target)\n\n return composed", "def __rfc_to_openssl(user_dn):\n dn_parts = [x.strip() for x in user_dn.split(',')]\n dn_parts.reverse()\n return '/%s' % '/'.join(dn_parts)", "def build_path(key_dict, path_string):\n for key, value in key_dict.items():\n path_string = re.sub('\\$\\{' + key + '\\}', value, path_string)\n\n return path_string", "def _get_pretty_name(name):\n pretty = ''\n if name.countryName:\n pretty += '/C=' + name.countryName\n if name.stateOrProvinceName:\n pretty += '/ST=' + name.stateOrProvinceName\n if name.localityName:\n pretty += '/L=' + name.localityName\n if name.organizationName:\n pretty += '/O=' + name.organizationName\n if name.organizationalUnitName:\n pretty += '/OU=' + name.organizationalUnitName\n if name.commonName:\n pretty += '/CN=' + name.commonName\n if name.emailAddress:\n pretty += '/email=' + name.emailAddress\n return pretty", "def _namespace_to_unicode(self):\n\t\treturn u\":\".join(self.namespace_parts)", "def as_key(key):\n return key.lstrip('/').rstrip('/')", "def sanatize_path(self, path):\n # Remove extra whitespace\n path = path.strip()\n\n # Remove slash from end of path\n path = 
path.rstrip(os.sep)\n\n return path", "def repair_path(dict_1):\n dup_dict = dict(dict_1)\n for k,v in dup_dict.items():\n if '\\\\' in k:\n key = k.replace('\\\\', '/')\n val = v.replace('\\\\', '/')\n del dict_1[k]\n dict_1[key] = val\n return dict_1", "def _canonify(self, rut):\n rut = smart_unicode(rut).replace(' ', '').replace('.', '').replace('-', '')\n return rut[:-1], rut[-1]", "def str_dict(d):\n return \", \".join(\"%s=%s\" % (k, d[k]) for k in d)", "def join_path(d, root):\n if isinstance(d, dict):\n if 'path' in d:\n d['path'] = os.path.join(root, d['path'])\n root = d['path']\n for item in d:\n d[item] = join_path(d[item], root)\n\n elif isinstance(d, list):\n d = [join_path(item, root) for item in d]\n\n return d", "def normalize_directory_name(directory_name: str) -> str:\n return directory_name.lower()", "def convert_x509_name(name):\n types = {\n 'country_name': 'C',\n 'state_or_province_name': 'ST',\n 'locality_name': 'L',\n 'organization_name': 'O',\n 'organizational_unit_name': 'OU',\n 'common_name': 'CN',\n 'email_address': 'emailAddress'\n }\n\n return '/'.join(['{}={}'.format(types[attr], name.native[attr]) for attr in name.native])", "def _format_account_with_institution_57C(self, val):\n if val:\n val = \"/\" + str(val)\n return val", "def to_string(cls, hierarchical_dict: dict) -> str:\n keys = cls.get_all_keys(hierarchical_dict)\n keys = sorted(keys)\n res = \"\"\n for key in keys:\n res += f\"{key} = {FuseUtilsHierarchicalDict.get(hierarchical_dict, key)}\\n\"\n\n return res", "def get_dict_str(d: dict) -> str:\n\treturn str({str(u): str(v) for u, v in d.items()})", "def build_flattened_key(prefix, key):\n return key if not prefix else prefix + \".\" + key", "def join_path(values: t.List[str]) -> str:\n from axonius_api_client.tools import listify\n\n return \" => \".join(listify(values))", "def _form_service_key(self, service_name, service_addr):\n return '/'.join((service_name, service_addr))", "def join(self, *parts):\n if parts:\n parts = list(parts)\n if len(parts) > 1:\n for i, p in enumerate(parts[:-1]):\n parts[i] = p.strip('/')\n parts[-1] = parts[-1].lstrip('/')\n return '/'.join(parts)", "def showd(d):\r\n return ' '.join([':%s %s' % (k,v)\r\n for k,v in\r\n sorted(d.items())\r\n if not \"_\" in k])", "def _format_intermediary_institution_56C(self, val):\n if val:\n val = \"/\" + str(val)\n return val", "def key_join(self, key, encode=True):\n if isinstance(key, str):\n parts = key.split('/')\n else:\n parts = key\n new_parts = []\n\n for part in parts:\n if isinstance(part, bytes):\n part = part.decode(\"utf-8\")\n if encode:\n part = quote(str(part))\n new_parts.append(part)\n\n return '/'.join(new_parts)", "def _construct_path(self, sep, with_drive_letter=True):\n result = sep.join(self._components)\n if self._absolute:\n result = \"{}{}\".format(sep, result)\n if with_drive_letter and self._drive_letter:\n result = \"{}:{}\".format(self._drive_letter, result)\n return result", "def _format_environment_value(value):\n value = str(value)\n if platform.system() == \"Windows\":\n # Split on semicolons first\n components = value.split(os.pathsep)\n\n # On each component, replace anything that looks like\n # a drive letter with a unix-like drive path.\n components = [re.sub(r\"^([A-Za-z]):\\\\\",\n r\"\\\\\\1\\\\\",\n c) for c in components]\n\n return \":\".join(components).replace(\"\\\\\", \"/\")\n\n return value", "def sd_dict_to_syslog_str(cls, sd_dict):\n syslog_sds = ''\n for sd_key, sd_val in list(sd_dict.items()):\n syslog_str = 
'[{sd_key}'.format(sd_key=sd_key)\n\n for sub_key, sub_val in list(sd_val.items()):\n syslog_str = '{orig} {key}=\"{value}\"'.format(\n orig=syslog_str, key=sub_key, value=sub_val)\n syslog_str += ']'\n\n syslog_sds += syslog_str\n\n return syslog_sds", "def sanitize_fname(directory, fname):\n return opath.join(\n bytes(directory, encoding='ascii'),\n opath.normpath(\n b'/' + fname).lstrip(b'/'))", "def key2basename(self, key):\n for char, replacement in self.dangerous_chars.items():\n key = key.replace(char, replacement)\n return key", "def normalize_orcid(val):\n for orcid_url in orcid_urls:\n if val.startswith(orcid_url):\n val = val[len(orcid_url) :]\n break\n val = val.replace(\"-\", \"\").replace(\" \", \"\")\n\n return \"-\".join([val[0:4], val[4:8], val[8:12], val[12:16]])", "def getFormattedDirectory(directory):\n outdir = directory\n if not(outdir.endswith(\"/\")):\n outdir = outdir+\"/\"\n return outdir", "def undotted_keys(dict):\n return {k.lstrip(\".\"): v for k, v in dict.items()}", "def __unicode__(self):\n d = ((2, \".\"), (6, \".\"), (10, \"/\"), (15, \"-\"))\n s = list(map(str, self.cnpj))\n \n for i, v in d:\n s.insert(i, v)\n \n r = ''.join(s)\n \n return r", "def flatten_hostname(hostname):\n return hostname.replace('.', '-')", "def full_name(self):\n path = [str(p) for p in self.path]\n # TODO add further checks, the mapping should only occur on stdlib.\n try:\n path[0] = self._mapping[path[0]]\n except KeyError:\n pass\n for key, repl in self._tuple_mapping.items():\n if tuple(path[:len(key)]) == key:\n path = [repl] + path[len(key):]\n\n return '.'.join(path if path[0] else path[1:])", "def _formatPath(directoryPath, filePath):\n return directoryPath + \"\\\\\" + filePath", "def _format_dict(self, dict):\n\n result = \"\"\n for k, v in dict.items():\n result += \"\\n{0}: {1}\".format(k.capitalize(), v)\n\n return result", "def _normalize(self, dictionnary):\r\n copy_dict = OrderedDict()\r\n for k,v in dictionnary.items():\r\n if isinstance(v, OrderedDict):\r\n copy_dict[k.replace('#','').replace('@','')] = self._normalize(v)\r\n else:\r\n copy_dict[k.replace('#','').replace('@','')] = v\r\n return copy_dict", "def parse_dn(dnstr):\n res = []\n for part in loop_escaped(dnstr, '/'):\n part = part.strip()\n if not part:\n continue\n if '=' not in part:\n raise InvalidCertificate(\"Need k=v in Name string\")\n k, v = part.split('=', 1)\n res.append((k.strip(), v.strip()))\n return res", "def normalize_gnd(val):\n if val.startswith(gnd_resolver_url):\n val = val[len(gnd_resolver_url) :]\n if val.lower().startswith(\"gnd:\"):\n val = val[len(\"gnd:\") :]\n return \"gnd:{0}\".format(val)", "def convert_single_relation_url_to_simplified_format(relation_url):\n relation_url = relation_url.strip()\n prefix = 'www.freebase.com/'\n if not relation_url.startswith(prefix):\n raise Exception(\"Invalid format of relation '{}', expected prefix '{}'\".format(relation_url, prefix))\n return relation_url[len(prefix):].replace('/', '.').strip()", "def parse(cls, dn, separator=None):\n if separator in (\"/\", None):\n parser_re = cls.SLASH_PARSER_RE\n elif separator == \",\":\n parser_re = cls.COMMA_PARSER_RE\n else:\n raise X509SubjectNameConfigError(\"Invalid field separator %r\" % separator)\n\n dn_fields = parser_re.split(dn)\n if len(dn_fields) < 2:\n raise X509SubjectNameConfigError('Error parsing DN string: \"%s\"' % dn)\n\n items = zip(dn_fields[1::2], dn_fields[2::2])\n\n # Strip leading and trailing space chars and convert into a\n # dictionary\n parsed_dn = {}\n for key, val 
in items:\n key = key.strip()\n if key in parsed_dn:\n if isinstance(parsed_dn[key], tuple):\n parsed_dn[key] = tuple(list(parsed_dn[key]) + [val])\n else:\n parsed_dn[key] = (parsed_dn[key], val)\n else:\n parsed_dn[key] = val\n\n return parsed_dn", "def dict2str(dic):\n return ','.join(\"%s=%s\" % (key, val)\n for key, val in sorted(dic.items()))", "def show_account(self, obj): # pylint: disable=no-self-use\n return '%s\\\\%s' % (obj.domain, obj.username)", "def normalize(cls, target):\n parts = collections.deque()\n for p in target.split('/'):\n if (p == '' or p == '.') and len(parts) > 0:\n pass\n elif p == '..' and len(parts) > 0:\n parts.pop()\n else:\n parts.append(p)\n return '/'.join(parts)", "def remove_dash_and_underscore_from_key(d): # type: ignore\n\n if not isinstance(d, (dict, list)):\n return d\n elif isinstance(d, list):\n return [\n value\n for value in (\n remove_dash_and_underscore_from_key(value) for value in d\n )\n ]\n else:\n return {\n pascal_case(key): remove_dash_and_underscore_from_key(value)\n for key, value in d.items()\n }", "def _isolated_path_format(self, path):\n if self._root_dir.is_parent_of(path):\n return '%s:%s' % (\n self._root_dir,\n self._api.path.join(*path.pieces[len(self._root_dir.pieces):])\n )\n else:\n assert path == self._root_dir, \\\n \"isolated path must be equal to or within %s\" % self._root_dir\n return '%s:.' % self._root_dir", "def domain_dns_name(self):\n domain_dn = self.get_default_basedn()\n return domain_dn.canonical_str().split('/')[0]", "def fix_dir_separator(slash_delim_path):\n return slash_delim_path.replace('/', os.path.sep)", "def _build_directory_structure_string(structure):\n def _recurse_dic(dic, level, prefix, buf):\n idx = 0\n for key, value in dic.items():\n idc = \"┣━\"\n if idx == len(dic.keys()) - 1:\n idc = \"┗━\"\n if level == 0:\n idc = \"\"\n\n if isinstance(value, dict):\n buf.append(\"{0}{1}[{2}]\".format(prefix, idc, key))\n if len(dic.keys()) > 1 and idx != len(dic.keys()) - 1:\n tmp_prefix = prefix + \"┃ \"\n else:\n tmp_prefix = prefix + \" \"\n _recurse_dic(value, level + 1, tmp_prefix, buf)\n else:\n buf.append(\"{0}{1}{2}\".format(prefix, idc, key))\n\n idx += 1\n\n buf = []\n _recurse_dic(structure, 0, \"\", buf)\n return \"\\n\".join(buf)", "def _dict_to_string(self, dictionary):\n st = ''\n for i in range(len(dictionary)):\n st = st + dictionary[i]\n return st", "def key_to_string(cls, key):\n return '_'.join(map(str, key))", "def format_path(path):\n if not path:\n return path\n\n path = re.sub(r'/+', '/', path)\n\n if path == '/':\n return (u\"\" if isinstance(path, unicode) else \"\")\n else:\n return '/' + path.strip('/')", "def _urlize(self, *args):\n\n return \"csod\" + \"/\".join(map(self._cleanString, args))", "def _clean_name(self, name):\n # Useful for windows' paths\n return os.path.normpath(name).replace(\"\\\\\", \"/\")", "def str_join(paths: []):\n return \"/\".join(paths)", "def clean_directory_path(path):\n allowed = string.digits + string.ascii_letters + string.whitespace\n stripped = \"\".join(c for c in path if c in allowed)\n return stripped.replace(\" \", \"_\")", "def normalize_principal(principal):\n (user, realm) = split_principal(principal)\n return unicode('%s@%s' % (user, realm))", "def compact_idstr(dict_):\n from netharn import util\n import ubelt as ub\n short_keys = util.shortest_unique_prefixes(dict_.keys())\n short_dict = ub.odict(sorted(zip(short_keys, dict_.values())))\n idstr = ub.repr2(short_dict, nobr=1, itemsep='', si=1, nl=0,\n explicit=1)\n return idstr", 
"def _build_fullname(tree: dict) -> None:\n def _apply(item: dict) -> None:\n components = item.pop(\"components\")\n try:\n idx = components[::-1].index(None)\n except ValueError:\n pass\n else:\n components = components[len(components) - idx:]\n if components:\n item[\"fullname\"] = \".\".join(components)\n else:\n item[\"fullname\"] = None\n apply_tree(tree, _apply)", "def shorter_name(key):\n key_short = key\n for sep in ['#', '/']:\n ind = key_short.rfind(sep)\n if ind is not None:\n key_short = key_short[ind+1:]\n else:\n key_short = key_short\n return key_short.replace('-', '_').replace('.', '_')", "def format_path(path):\n return path if path.endswith('/') else path + '/'", "def path_join(first: str, second: str) -> str:\n first = first.rstrip('/\\\\')\n second = second.lstrip('/\\\\')\n if not first: return second\n if not second: return first\n return first + '/' + second", "def _format_key(self, k: str) -> str:\n if k[0] == '.':\n k = k[1:]\n k = k.replace('.', '_')\n k = k.upper()\n k = re.sub(self.KEY_REGEX, '', k)\n return k", "def convert_values_to_strings(dict,list_sep=', '):\n d=dict.copy()\n for key in dict.keys():\n if not isinstance(dict[key],str):\n try:\n d[key]=list_sep.join(dict[key])\n except:\n del d[key]\n return d", "def flatten(d, parent_key='', sep='_'):\n items = []\n for k, v in d.items():\n new_key = parent_key + sep + k if parent_key else k\n if isinstance(v, collections.MutableMapping):\n items.extend(flatten(v, new_key, sep=sep).items())\n else:\n items.append((new_key, v))\n items = dict(items)\n # remove info like PCA primitive ID\n items_not_strings = {k: v for k, v in items.items() if type(v) != str}\n return dict(items_not_strings)", "def stringify_keys(d):\n di = copy.deepcopy(d)\n for key in di.keys():\n # check inner dict\n if isinstance(d[key], dict):\n value = stringify_keys(d[key])\n else:\n value = d[key]\n\n # convert nonstring to string if needed\n if not isinstance(key, str):\n try:\n d[str(key)] = value\n except Exception:\n try:\n d[repr(key)] = value\n except Exception:\n raise\n\n # delete old key\n del d[key]\n return d", "def unders_to_dashes_in_keys(self) -> None:\n for key_node, _ in self.yaml_node.value:\n key_node.value = key_node.value.replace('_', '-')", "def osnorm(self):\n import os\n if os.sep=='/' and \"\\\\\" in str(self):\n return Path(os.path.normpath(str(self).replace('\\\\','/' )))\n elif os.sep=='\\\\' and \"/\" in str(self):\n return Path(os.path.normpath(str(self).replace('/','\\\\' )))\n else:\n return self.norm()", "def directory_slash(destination):\n\n if destination[-1] != '/':\n return destination + '/'\n\n return destination", "def _encode(dictionary):\n # Strip ugly base64 padding.\n byteStr = bytearray(json.dumps(dictionary).encode())\n encodedStr = base64.urlsafe_b64encode(byteStr)\n return encodedStr.rstrip('='.encode())", "def make_paths_safe(params):\n for key in params.keys():\n if isinstance(params[key], str):\n params[key] = params[key].replace(\"/\", os.sep)\n params[key] = params[key].replace(\"\\\\\", os.sep)\n\n return params", "def clean_keys_of_slashes(record):\n for key in list(record):\n value = record[key]\n if \"/\" in key:\n # replace with _\n record[key.replace(\"/\", \"_\")] = record.pop(key)\n # Check if the value is a list containing nested dict and apply same\n if value:\n if isinstance(value, list) and isinstance(value[0], dict):\n for v in value:\n clean_keys_of_slashes(v)\n\n return record", "def compose_to_json(cls, decomposed_dict):\n\n def get(key):\n return 
decomposed_dict.get(key, '').strip()\n\n name = get('name')\n if not name:\n raise EndpointError('Decomposed endpoint must have a name')\n\n value = ''\n\n source = get('source')\n if source != name:\n value += source\n\n target = get('target')\n if not value:\n if cls.is_wildcard(target):\n value += '*'\n else:\n if '/' in target:\n value += '#' + target\n else:\n value += target\n\n elif not cls.is_wildcard(target) or not cls.RE_SOURCE.findall(source):\n value += '#' + (target or '*')\n\n return {name: value}", "def canonicalPath(path_or_object):", "def prefix_value(s):\n forbidden = forbidden_chars.intersection(s)\n if forbidden:\n raise ValueError('%(s)s contains forbidden characters'\n ' (%(forbidden)s)'\n % locals())\n stripped = s.strip('/')\n if stripped:\n return stripped.join('//')\n return '/'", "def dictKeysToCSV(d):\n return \",\".join([str(val) for val in nestedDictKeys(d)])", "def do_dict_join(d, k_sep=\".\", v_sep=\"=\"):\n \"\"\"Helper function for function do_dict_join\"\"\"\n res = []\n for k, v in d.iteritems():\n if isinstance(v, dict):\n new_res = map(lambda el: \"{}{}{}\".format(k, k_sep, el),\n do_dict_join(v, k_sep, v_sep))\n\n res.extend(new_res)\n else:\n res.append(\"{}{}{}\".format(k, v_sep, v))\n return res", "def composePath(self,splitedPath):\n # 027 It is used anywhere?? Nope!! Remove!\n\n self.debug.printHeader()\n return os.sep.join(splitedPath)", "def build_path_device(device_id):\n padding_device = PAIRS_SHINGLES_DEVICE * 2\n s = padding_zeroes(int(int(device_id) % NUMBER_DEVICES), padding_device)\n res = ''\n for i in range(0, padding_device, 2):\n res += s[i: i+2] + '/'\n return res", "def dict2str(d):\n els = ['%s=%s' % (k,v) for k,v in sorted(d.items())]\n return seq2str(els)", "def domain_string(domain, path_set):\n out = domain + '\\n' + '\\n'.join(list(path_set)) + '\\n\\n\\n'\n return out", "def _construct_url(parts):\n results = []\n last_idx = len(parts) - 1\n for n, part in enumerate(parts):\n if n > 0:\n part = part.lstrip('/')\n if n < last_idx:\n part = part.rstrip('/')\n if part:\n results.append(part)\n return '/'.join(results)", "def construct_path(id_val):\n id_val = str(id_val)\n path = id_val[:3] + \"/\" + id_val[3:6] + \"/\" + id_val[6:9] + \"/\"\n path += id_val\n return path", "def _dirname(self, key):\n return os.path.join(self.root, key[:2])", "def stripslashes(val):\n return val.replace('\\\\', '')", "def fix_path(name):\n saveslash = \"/\" if (name[0] == \"/\") else \"\"\n name = re.split(\"\\\\\\|/\", name)\n new = name[0]\n for i in range(1,len(name)):\n new = os.path.join(new, name[i])\n new = \"%s%s\" % (saveslash, new)\n return new", "def safepath(p):\n return p.replace('/', os.sep)", "def normalizePath(path):\n if path == None or len(path) == 0 or path == '/':\n return '/'\n buff = '/' + path if path[0] != '/' else path\n return buff.replace('//', '/')", "def get_domainURI(self):\n return \"{0}/cdmi_domains/radon/\".format(self.api_root)", "def get_domainURI(self):\n return \"{0}/cdmi_domains/radon/\".format(self.api_root)", "def nt_path_to_posix_path(path):\r\n path = path.replace(\"\\\\\", \"/\")\r\n parts = path.split(\":\")\r\n if len(parts) > 1:\r\n return \"/\" + parts[0].lower() + parts[1]\r\n return path", "def ldap_filter(val):\n if isinstance(val, Undefined):\n return UNDEFINED_LABEL\n escaped = []\n for char in str(val):\n if char < '0' or char > 'z' or char in \"\\\\*()\":\n char = \"\\\\%02x\" % ord(char)\n escaped.append(char)\n return ''.join(escaped)" ]
[ "0.6659733", "0.5775696", "0.5740528", "0.5423116", "0.54046327", "0.5284252", "0.52661365", "0.5201889", "0.5192248", "0.51429164", "0.50932664", "0.5080492", "0.5076293", "0.5024254", "0.50174654", "0.5016909", "0.5004144", "0.50006694", "0.4990777", "0.4978432", "0.49433088", "0.49393588", "0.49202508", "0.49180797", "0.49029782", "0.49011016", "0.48931116", "0.48685828", "0.48674035", "0.48593268", "0.48475838", "0.48314396", "0.48289785", "0.4828095", "0.4827003", "0.48260266", "0.48111203", "0.48097867", "0.48069283", "0.48062223", "0.47991407", "0.47796422", "0.47743583", "0.47691074", "0.47630426", "0.47574422", "0.4727841", "0.4727006", "0.47232795", "0.47216225", "0.4716026", "0.46973962", "0.46948063", "0.468751", "0.4680741", "0.46738723", "0.46691707", "0.4661252", "0.46449086", "0.464077", "0.46343297", "0.46340513", "0.46271238", "0.46257854", "0.46244425", "0.46202564", "0.46145833", "0.46121338", "0.46099606", "0.46093112", "0.46089938", "0.46079728", "0.46066317", "0.459598", "0.45921937", "0.45912957", "0.45868483", "0.4579888", "0.45771644", "0.45695275", "0.45686984", "0.45513794", "0.45501417", "0.4547888", "0.4546328", "0.45386016", "0.45379657", "0.4536287", "0.45362574", "0.45349488", "0.45341823", "0.4531491", "0.45097697", "0.45095554", "0.45057422", "0.4505617", "0.4503533", "0.4503533", "0.44950742", "0.44934222" ]
0.53524345
5
Parse argument value with function if string.
def maybe_parse(val, parse_func): if val is None: return [] if isinstance(val, (bytes, str)): return parse_func(val) if isinstance(val, dict): return list(val.items()) if isinstance(val, (list, tuple)): return list(val) return val
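A quick usage sketch of the maybe_parse helper above; parse_csv is a hypothetical parse_func supplied only for illustration:

def parse_csv(text):
    # hypothetical parser: split a comma-separated string
    return [item.strip() for item in text.split(",")]

maybe_parse(None, parse_csv)        # -> []
maybe_parse("a, b, c", parse_csv)   # -> ['a', 'b', 'c']
maybe_parse({"k": "v"}, parse_csv)  # -> [('k', 'v')]
maybe_parse(("x", "y"), parse_csv)  # -> ['x', 'y']
maybe_parse(42, parse_csv)          # -> 42 (non-container values pass through unchanged)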
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def eval_arg(arg_value, arg_name=''):\n if arg_name.lower().endswith('_list') and isinstance(arg_value, str):\n return [eval_arg(cell) for cell in arg_value.split(',')]\n if not isinstance(arg_value, str):\n return arg_value\n if arg_value.lower() in ['true', 'false']:\n return eval(arg_value.capitalize())\n if arg_value.lstrip('-').isdigit():\n return int(arg_value)\n if arg_value.replace('.', '', 1).isdigit():\n return float(arg_value)\n return arg_value", "def __call__(self, value):\n with tf.name_scope('parser'):\n data = decode(value)\n return self._parse_fn(data)", "def _parse_and_validate(self, val):\n if self._is_parameter_type:\n val = self._parse(val) if isinstance(val, str) else val\n self._validate_or_throw(val)\n return val", "def parse(self, args: typing.List[str]) -> str:\n try:\n args = self.cli_parser.parse_args(args)\n if len(vars(args)) == 1:\n return args.func()\n\n return args.func(args)\n except ArgumentParseError as err:\n return str(err)", "def _parse(value, function, fmt):\n try:\n return function(value)\n except ValueError as e:\n raise_from(ValueError(fmt.format(e)), None)", "def _parse_value(value):\n # Check if it is a boolean, int, or float value\n try:\n value = json.loads(value.lower())\n return value\n except ValueError:\n return value", "def parse_string_value(str_value: Text) -> Any:\n try:\n return ast.literal_eval(str_value)\n except ValueError:\n return str_value\n except SyntaxError:\n # e.g. $var, ${func}\n return str_value", "def parse_value(string: str) -> Union[str, dict, bool, int, float]:\n unesc_str = unescape(string)\n stripped = string.strip()\n if REGEX_RE.match(stripped):\n return {\"regex\": unesc_str.strip()[7:-2]}\n elif BOOL_RE.match(stripped):\n return stripped.lower() == \"true\"\n elif INT_RE.match(stripped):\n return int(stripped)\n elif FLOAT_RE.match(stripped):\n return float(stripped)\n else:\n return unesc_str[1:-1]", "def test_parses(func):\n quoting.parse_function(func)", "def dprime_fnc_from_str_argument(str_arg):\n try:\n return dprime_fnc_from_str(\n str_arg.translate(string.maketrans(':',' '))\n )\n except ValueError, Argument:\n raise ValueError(\"Dprime argument error (in {0}). {1}\".format(str_arg, Argument))", "def vararg_callback(option, opt_str, value, parser):\n\tassert value is None\n\tvalue = []\n\n\tdef floatable(str):\n\t\ttry:\n\t\t\tfloat(str)\n\t\t\treturn True\n\t\texcept ValueError:\n\t\t\treturn False\n\n\tfor arg in parser.rargs:\n\t\t# Stop on options like --foo \n\t\tif arg[:2] == \"--\" and len(arg) > 2:\n\t\t\tbreak\n\t\t# Stop on -a, but not on negative numbers\n\t\tif arg[:1] == \"-\" and len(arg) > 1 and not floatable(arg):\n\t\t\tbreak\n\t\tvalue.append(arg)\n\n\tdel parser.rargs[:len(value)]\n\tsetattr(parser.values, option.dest, value)", "def _str_to_val(self, value):\n kind, value = value.split(': ', 1)\n\n # Lists and dictionaries are special case\n if kind in ('L', 'D'):\n return eval(value)\n\n if kind in TYPE_MAPPING.keys():\n if kind == 'B':\n if value != 'True':\n return False\n\n value = TYPE_MAPPING[kind](value)\n\n return value\n else:\n raise ValueError(\"An Unknown type of setting was found!\")", "def parseArgument(self, argument, getValueOfArgument):\n if argument in self.arguments:\n if type(self.arguments[argument]) == bool:\n self.arguments[argument] = True\n else:\n self.arguments[argument] = self.castType(\n type(self.arguments[argument]), getValueOfArgument())\n else:\n self.error('unknown argument \"--{}\"'. 
format(argument))", "def _parse(val: str):\n\n if not isinstance(val, str):\n raise TypeError(\"Method requires string input\")\n\n value = re.findall(r'^([-+]?\\d*\\.\\d*(?=\\s)|\\d+(?=\\s))', val)\n if not (value and val[:len(value[0])] == value[0]):\n return val, None\n\n # string starts with value\n value = value[0]\n val = val[len(value):]\n\n val = val.strip()\n if val:\n unit = val\n else:\n unit = 'dimensionless'\n\n return value, unit", "def _func_deserialize(self, args): # pragma: no cover\n if len(args) == 0:\n return self.testing_options['empty']\n x = eval(args.decode(\"utf-8\"))\n return x", "def convert(cls, value: Any) -> Any:\n if value == '':\n return None\n if isinstance(value, str):\n try:\n return ast.literal_eval(value)\n except (SyntaxError, ValueError):\n cls.assert_value_ok(False, value)\n return value", "def str2enum(enum: type) -> Callable[[str], Any]:\n\n def _parse(string: str) -> Any:\n try:\n return enum[string.lower()] # type: ignore\n except BaseException:\n raise argparse.ArgumentTypeError( # pylint: disable=raise-missing-from\n \"Expected one of: \" + \" \".join([value.name for value in enum]) # type: ignore\n )\n\n return _parse", "def parse_metric_value(self, value):\n if isinstance(value, str):\n if value == \"\":\n return None\n\n # yes|true|on\n if self.is_true.match(value):\n return 1\n # no|false|off\n if self.is_false.match(value):\n return 0\n if self.is_null.match(value):\n return -1\n\n # anything else, try to convert it to a float\n try:\n r = float(value)\n return r\n except:\n pass\n\n return None\n\n return value", "def parse_value(cls, value):\n return value", "def parse_value(cls, value):\n return value", "def _eval(string):\n if not isinstance(string, basestring):\n return string\n if string == \"None\": \n return None\n if string == \"True\":\n return True\n if string == \"False\":\n return False\n if string.isdigit():\n return int(string)\n try:\n return float(string)\n except ValueError:\n return decode_utf8(string)", "def Parse(self, argument):\n self._value = argument", "def parse(self, argument):\n self._value = argument", "def parse_as(val, *types):\n for typ in types:\n try:\n return typ(val)\n except ValueError:\n pass\n return quote(val)", "def val_parser(parser, inputstring):\n\n inputstring = inputstring.strip()\n\n if float(inputstring) == 9.9e37:\n output = float('inf')\n else:\n output = float(inputstring)\n if parser == int:\n output = parser(output)\n\n return output", "def parse_function_str(function_str: str) -> Union[Callable[[object, dict], object], None]:\n\n if function_str is None:\n return None\n\n parsed_function = re.match(\"^([\\.\\w]+)(?:\\(([\\w|,%\\'-: ]*)\\))?$\", function_str)\n if not parsed_function:\n raise RuntimeError(f\"Invalid name for a transform function: '{function_str}'\")\n\n function_name, args = parsed_function.groups()\n args_list = str(args).split(\",\") if (args is not None and args != '') else []\n\n # Check if it is a built-in function\n builtin_function = FunctionBuilder.get_builtin_function(function_name, args_list)\n\n if builtin_function is not None:\n return builtin_function\n\n # Get it as custom function\n return FunctionBuilder.get_custom_function(function_name, args_list)", "def parse(string):\n \n global local_vars\n print \"parse(\"+string+\")\"\n\n # variables\n if string in local_vars: # e.g. 
'y'\n return string\n elif string == 'it':\n # print 'it: ',references[0]\n return g.it\n\n # operators\n elif string.find('\\gamma') == 0:\n return gamma(string[7],string[9:-1])\n elif string.find('\\iota') == 0:\n # treating iota as gamma for now\n return iota(string[6],string[8:-1])\n\n # function application\n else:\n fun = string.split( '(' , 1)[0]\n arg = parse(string.split( '(' , 1)[1][:-1])\n exec(fun+'(arg)')", "def try_arg(request, arg, typ=None):\n\tif not request.values or arg not in request.values:\n\t\terror(\"Missing '{}' argument\".format(arg))\n\tif typ is None:\n\t\treturn request.values[arg]\n\ttry:\n\t\treturn typ(request.values[arg])\n\texcept ValueError:\n\t\terror(\"Invalid '{}' argument\".format(arg))", "def parse(string):\r\n \r\n global local_vars\r\n # print \"parse(\"+string+\")\"\r\n\r\n # variables\r\n if string in local_vars: # e.g. 'y'\r\n return string\r\n elif string == 'it':\r\n # print 'it: ',references[0]\r\n return g.it\r\n\r\n # operators\r\n elif string.find('\\gamma') == 0:\r\n return gamma(string[7],string[9:-1])\r\n elif string.find('\\iota') == 0:\r\n # treating iota as gamma for now\r\n return iota(string[6],string[8:-1])\r\n\r\n # function application\r\n else:\r\n fun = string.split( '(' , 1)[0]\r\n arg = parse(string.split( '(' , 1)[1][:-1])\r\n exec(fun+'(arg)')", "def parse_parameter(code, param):\n if (\n param != \"null\"\n and param[0] != \"'\"\n and not is_number(param)\n ):\n return find_value(code, param).replace(\"'\", \"\")\n\n return param.replace(\"'\", \"\")", "def possible_float(arg):\n try:\n return float(arg)\n except ValueError:\n logging.info(f'failed to parse {arg} as a float, treating it as a string')\n return arg", "def parse(cls, value: str) -> Tuple[str, Dict[str, str]]:\n raw_value = read_value_from_path(value)\n args: Dict[str, str] = {}\n\n if \"@\" in raw_value:\n args[\"region\"], raw_value = raw_value.split(\"@\", 1)\n\n # now find any other arguments that can be filters\n matches = re.findall(r\"([0-9a-zA-z_-]+:[^\\s$]+)\", raw_value)\n for match in matches:\n k, v = match.split(\":\", 1)\n args[k] = v\n\n return args.pop(\"name_regex\"), args", "def process_value(self, value: str) -> Any:\n\n if not value:\n if self.data_required:\n raise ValueError('A value is required')\n return None\n return self.data_type(value)", "def acfunct(arg):\n try:\n functions = [dynet.rectify, dynet.tanh]\n functions = { function.__name__ : function for function in functions}\n functions[\"None\"] = None\n return functions[str(arg)]\n except:\n raise argparse.ArgumentTypeError(\"String {} does not match required format\".format(arg,))", "def parse_arg(type_, token, name):\n if type_ is Card and token is not None and type(token) is not Card:\n try:\n arg = Card(int(token))\n except ValueError:\n raise GameActionError(\n 'Error converting \"{0}\" argument: {1} token: \"{2}\"'\n .format(name, str(type_), token))\n\n return arg\n\n else:\n return token", "def parseValue(self, value):\n if self.isNumericVector():\n return map(self._pythonType, value.split(','))\n if self.typ == 'boolean':\n return _parseBool(value)\n return self._pythonType(value)", "def parse_value(self, value):\n\t\t\n\t\tif goodies.is_float(value):\n\t\t\treturn float(value)\n\t\telif goodies.is_int(value):\n\t\t\treturn int(value)\n\t\telif goodies.is_bool(value):\n\t\t\treturn bool(value.capitalize())\n\t\telse:\n\t\t\treturn value", "def convert_arg(node):\n if isinstance(node, ast.Name):\n return node.id\n else:\n return convert_literal_node(node)", "def 
infer_type_and_convert(value:str) -> Any:\n if value.lower() == 'true':\n return True\n elif value.lower() == 'false':\n return False\n elif value.isdigit():\n return int(value)\n elif is_float(value):\n return float(value)\n else:\n return value", "def parse(arg: Tuple[str, str, str, str, str]) -> Tuple[str, str, str]:\n return (arg[2], arg[3], arg[4])", "def handle_value(value):\n\n if value[-1] == 'x':\n return float(value[0:-1])\n\n if value[-1] == '%':\n return float(value[0:-1])\n\n if value[0].isdigit():\n return bytify(value)\n\n raise ValueError", "def _eval(string):\n # Remove black spaces\n string = string.strip()\n # Check if the string is None\n if string == \"\":\n return None\n # Check if the string is an integer\n elif string.isdigit():\n return int(string)\n # Try to convert to float\n try:\n string = re.sub(\"[Dd]\", \"e\", string)\n return float(string)\n except:\n pass\n # Else, return the stripped string\n return str(string)", "def parse_float(val, fn):\n return float(val)", "def type_cast(func,data_entry,*args):\n assert isinstance(data_entry,str)\n assert callable(func)\n try:\n out=func(data_entry,*args)\n except:\n out=None\n return out", "def eval(value: str) -> object:\n if value == \"\":\n return value\n\n value = eval(value)\n if isinstance(value, str):\n return '\"' + value + '\"'\n else:\n return value", "def mock_parser_fcn(s):\n return s", "def parseValue(expr):\n\n\ttry:\n\t\treturn eval(expr)\n\texcept:\n\t\treturn eval(re.sub(\"\\s+\", \",\", expr))\n\telse:\n\t\treturn expr", "def _eval_atom(self, vars, word):\n if word == \"null\":\n return None\n elif word == \"true\":\n return True\n elif word == \"false\":\n return False\n elif word.startswith(\"$\"):\n return vars[word[1:]]\n (instr, arg) = word.split(\":\")\n if instr == \"int\":\n return int(arg)\n elif instr == \"float\":\n return float(arg)\n elif instr == \"id\":\n return uuid.UUID(arg.zfill(32))\n elif instr == \"blob\":\n return bytearray.fromhex(arg)\n elif instr == \"str\":\n return arg\n else:\n raise NotAnAtom()", "def parseArgs(args):\n parsed = []\n for arg in args:\n print arg\n arg = arg.strip()\n interpretation = None\n try:\n interpretation = float(arg)\n if string.find(arg, \".\") == -1:\n interpretation = int(interpretation)\n except:\n # Oh - it was a string.\n interpretation = arg\n pass\n parsed.append(interpretation)\n return parsed", "def resolve_type(value: t.Any) -> t.Any:\n value = str(value).strip()\n if value.lower() == \"true\":\n return True\n elif value.lower() == \"false\":\n return False\n elif value.lower() == \"none\":\n return None\n else:\n # attempt to cast\n try:\n return int(value)\n except:\n pass\n try:\n return float(value)\n except:\n pass\n # attempt to parse\n try:\n return literal_eval(value)\n except ValueError:\n pass\n except SyntaxError: # happens with single topics starting with '/'\n pass\n # unparseable, return as str\n return value", "def _parse_f(kwargs) -> Tuple[str, Any, str]:\n try:\n f_name = kwargs.pop(\"f\")\n except KeyError:\n raise KeyError(\n \"Transformer needs a 'f' field to know which func to use\"\n )\n\n # Look in all transforms dicts\n all_transforms_keys = (\n list(TRANSFORMS_SERIES.keys())\n + list(TRANSFORMS_DF.keys())\n + list(TRANSFORMS_GDF.keys())\n )\n try:\n func = TRANSFORMS_SERIES[f_name]\n input_type = \"s\"\n except KeyError:\n try:\n func = TRANSFORMS_DF[f_name]\n input_type = \"df\"\n except KeyError:\n try:\n func = TRANSFORMS_GDF[f_name]\n input_type = \"gdf\"\n except KeyError:\n raise KeyError(\n f\"You 
passed f={f_name}. \"\n \"Only the following values of f are recognised: \"\n f\"{all_transforms_keys}\"\n )\n return f_name, func, input_type", "def value_options(*args):\n\n @with_pattern(r\"|\".join(args))\n def parse_options(text):\n return text\n\n return parse_options", "def _parse_env_value(val):\n if val.lower() == \"false\":\n return False\n elif val.lower() == \"true\":\n return True\n try:\n return int(val)\n except ValueError:\n pass\n try:\n return float(val)\n except ValueError:\n pass\n return val", "def parse_arguments(args):", "def parse_tag_value(value_str: str) -> Any:\n # Empty string is short for None.\n if not value_str:\n return None\n\n # If the first character is a JSON compound type (array, object) or\n # or a string, then parse as normal json.\n if value_str[0] in (\"[\", \"{\", '\"'):\n try:\n return json.loads(value_str)\n except json.JSONDecodeError as error:\n raise ValueError(f\"Invalid tag value: {error}\")\n\n # Try to automatically infer the type of the tag value.\n try:\n return int(value_str)\n except ValueError:\n pass\n\n try:\n return float(value_str)\n except ValueError:\n pass\n\n try:\n return str2literal(value_str)\n except ValueError:\n pass\n\n # Assume string.\n return value_str", "def _par_from_parser(x):\n if not isinstance(x, (numbers.Real, u.Quantity)):\n x = float(x)\n return x", "def str2choice(options: List[str]) -> Callable[[str], str]:\n\n def _parse(string: str) -> str:\n if string not in options:\n raise argparse.ArgumentTypeError(\"Expected one of: \" + \" \".join(options))\n return string\n\n return _parse", "def _ParseStringOption(cls, options, argument_name, default_value=None):\n argument_value = getattr(options, argument_name, None)\n if not argument_value:\n return default_value\n\n if isinstance(argument_value, py2to3.BYTES_TYPE):\n encoding = sys.stdin.encoding\n\n # Note that sys.stdin.encoding can be None.\n if not encoding:\n encoding = locale.getpreferredencoding()\n if not encoding:\n encoding = cls._PREFERRED_ENCODING\n\n try:\n argument_value = argument_value.decode(encoding)\n except UnicodeDecodeError as exception:\n raise errors.BadConfigOption((\n u'Unable to convert option: {0:s} to Unicode with error: '\n u'{1:s}.').format(argument_name, exception))\n\n elif not isinstance(argument_value, py2to3.UNICODE_TYPE):\n raise errors.BadConfigOption(\n u'Unsupported option: {0:s} string type required.'.format(\n argument_name))\n\n return argument_value", "def _pre_argument_parsing(self):\n pass", "def str_to_value(s):\n s = s.strip()\n if _int_matcher.match(s):\n return int(s)\n elif _float_matcher.match(s):\n return float(s)\n elif _bool_matcher.match(s):\n return (s.lower() == 'true')\n return s", "def get_value_from_str(value_str):\n try:\n return gdb.parse_and_eval(value_str)\n except RuntimeError:\n return None", "def parse_value(cls, value):\n raise NotImplementedError(\"subclass must implement parse_value()\")", "def parser(self, value: Optional[Callable[[Mapping], Mapping]]) -> None:\n self._parse = value", "def parse(self):\n args = self.args\n if args and not args[0] in [\"'\", \",\", \":\"]:\n args = \" %s\" % args.strip()\n self.args = args", "def processArgument(self, value):\n if not isinstance(value, basestring):\n return value\n \n value = self._RE_FIND.subn(self._replaceFind, value)[0]\n value = self._RE_ENV.subn(self._replaceEnv, value)[0]\n \n return value", "def cast(val):\n\n for func in [int, float, lambda x: x.strip(), lambda x: x]:\n try:\n return func(val)\n except ValueError:\n pass", "def 
parse_function_line(line: str):\n without_call = (line.split(\"function\")[1]).strip()\n split_array = without_call.split()\n function = split_array[0]\n return function", "def parse_from_request(self, name, request):\n # type: (str, Request) -> Any\n name_bytes = name.encode()\n if name_bytes not in request.args:\n if self.default is not None:\n return self.default\n if self.required:\n raise Error(BAD_REQUEST, message=b\"%s is required\" % name_bytes)\n else:\n return None\n\n if len(request.args[name_bytes]) != 1:\n raise Error(BAD_REQUEST, message=b\"Pass exactly one argument for %s\" % name_bytes)\n\n val = request.args[name_bytes][0]\n return self.parse(val)", "def to_value_type(cls, val_str, type_arg, member_type):\n if val_str is None:\n return None\n if type_arg == bool:\n return cls.ensure_bool(val_str)\n try:\n if type_arg == list:\n return ListValueComponent.create(val_str, member_type=member_type)\n if type_arg == dict:\n return DictValueComponent.create(val_str)\n return type_arg(val_str)\n except (TypeError, ValueError) as e:\n if issubclass(type_arg, Enum):\n choices = \", \".join(f\"{choice.value}\" for choice in type_arg)\n raise ParseError(f\"Invalid choice '{val_str}'. Choose from: {choices}\")\n raise ParseError(\n f\"Error applying type '{type_arg.__name__}' to option value '{val_str}': {e}\"\n )", "def convert_val(val_str, val):\n if val is bool:\n if 'true' in val_str.lower(): val_str = 'true' \n else: val_str = '' # otherwise set to false\n val_type = val\n try:\n return val_type(val_str)\n except ValueError:\n # Can it be a float ?\n return val_type(float(val_str))", "def parse_function(self, match: Match[str]) -> Tuple[str, Optional[str], List[str], str]:\n raw_function = match.group(\"function\")\n function, combinator = fields.parse_combinator(raw_function)\n\n if not self.is_function(function):\n raise self.config.missing_function_error(f\"{function} is not a valid function\")\n\n arguments = fields.parse_arguments(function, match.group(\"columns\"))\n alias: Union[str, Any, None] = match.group(\"alias\")\n\n if alias is None:\n alias = fields.get_function_alias_with_columns(raw_function, arguments)\n\n return (function, combinator, arguments, alias)", "def func_deserialize(self, args): # pragma: no cover\n if len(args) == 0:\n return []\n x = eval(args.decode(\"utf-8\"))\n return x", "def parseValue(value, nodata=(\"\", \"Na\", \"NaN\", \"-\", \"--\", \"N/A\")):\n if value is None:\n return None\n if isstring(value) and value in nodata:\n return None\n if isstring(value) and value.startswith(\"(\") and value.endswith(\")\"):\n value = unwrap(value,\"(\",\")\")\n return parseValue(listify(value))\n if isstring(value) and value.startswith(\"[\") and value.endswith(\"]\"):\n value = unwrap(value,\"[\",\"]\")\n return parseValue(listify(value))\n elif isdate(value):\n return parseDate(value)\n elif isdatetime(value):\n return strftime(\"%Y-%m-%d %H:%M:%S\", value)\n elif isint(value):\n return parseInt(value)\n elif isfloat(value):\n return parseFloat(value)\n elif isbool(value):\n return parseBool(value)\n elif isstring(value):\n return value\n elif isarray(value):\n return [parseValue(item) for item in value]\n return value", "def mock_parser_fcn(s):", "def try_parse_field(field_name, value, parser_dict):\n parser = parser_dict.get(field_name) # None if no such entry\n if parser is not None:\n return try_or_none(parser)(value)\n else:\n return value", "def string_p(value):\n if type(value) is not str:\n raise Invalid(\"invalid value type 
{value}\".format(value=value))", "def _post_argument_parsing(self):\n pass", "def processArgument(self, value):\r\n if not isinstance(value, basestring):\r\n return value\r\n\r\n value = self._RE_FIND.subn(self._replaceFind, value)[0]\r\n value = self._RE_ENV.subn(self._replaceEnv, value)[0]\r\n\r\n return value", "def try_parse_field(field_name, value, parser_dict):\n parser = parser_dict.get(field_name) # None if no such entry\n if parser is not None:\n return try_or_none(parser)(value)\n else:\n return value", "def get_function_from_text(f):\n return lambda x: eval_expr(f, {'x': x}, numpy_dict)", "def __parse_var(content: str, is_positional: bool) -> (typing.Optional[str], ArgNum, int):\n if content[0] in \"]>\":\n return (None,\n ArgNum(Quantifier.N, 1) if is_positional else ArgNum(Quantifier.FLAG),\n 0)\n\n offset = 0\n\n has_brace = False\n has_equal = False\n\n if content[offset] == \"[\":\n has_brace = True\n offset += 1\n\n if content[offset] == \"=\":\n has_equal = True\n offset += 1\n\n if not is_positional and not has_brace and not has_equal:\n raise PatternError(f\"non-positional arguments with quantifier != 1 must have either '[', '=', or '[=' but found\"\n f\"'{content[offset]}'\")\n\n if (match := __IDENTIFIER_REGEX.match(content[offset::])) is not None:\n ident = match.string[:match.end():]\n offset += match.end()\n else:\n ident = None\n\n arg_num, size = __parse_arg_num(content[offset::],\n (has_equal or is_positional) and has_brace,\n not has_equal and not is_positional)\n offset += size\n\n if has_brace and content[offset] != \"]\":\n raise PatternError(f\"expected ']' but found '{content[offset]}\")\n elif has_brace:\n offset += 1\n\n return ident, arg_num, offset", "def _parseSingle(string):\n string = string.strip()\n \n if len(string) == 0:\n return ''\n \n pattern = re.compile(r'[^0-9]')\n if not pattern.search(string):\n return int(string)\n pattern = re.compile(r'[^0-9\\.eE]')\n if not pattern.search(string):\n if (string.count('.') <= 1 and \n (string.count('e') + string.count('E') <= 1)):\n return float(string)\n \n boolValue = _bool(string)\n if boolValue is not None:\n return boolValue\n \n if string[0] == string[-1]:\n if string[0] == '\"' or string[0] == \"'\":\n return string[1:-1]\n elif string[1] == string[-1]:\n if ((string[0] == 'u' or string[0] == 'r') and \n (string[1] == '\"' or string[1] == \"'\")):\n return string[2:-1]\n \n if string == 'None':\n return None\n \n return string", "def __parse_function_args(self, buffer):\n\t\targs = []\n\t\ttoken = buffer.read(1)\n\t\twhile token != \"(\": # FIXME don't duplicate code with __read_block\n\t\t\ttoken = buffer.read(1)\n\t\t\tassert token\n\t\tcount = 1\n\t\t\n\t\tdef flusharg(arg, args):\n\t\t\targ = \"\".join(arg)\n\t\t\targ = SpellString(arg).format(self.obj, proxy=self.proxy)\n\t\t\targs.append(arg)\n\t\t\treturn []\n\t\t\n\t\t_arg = []\n\t\twhile count:\n\t\t\ttoken = buffer.read(1)\n\t\t\tif token == \"(\":\n\t\t\t\tcount += 1\n\t\t\telif token == \")\":\n\t\t\t\tcount -= 1\n\t\t\tif not count or not token:\n\t\t\t\t_arg = flusharg(_arg, args)\n\t\t\t\tbreak\n\t\t\t\n\t\t\tif token == \",\" and count == 1:\n\t\t\t\t_arg = flusharg(_arg, args)\n\t\t\t\tcontinue\n\t\t\t\n\t\t\t_arg.append(token)\n\t\t\n\t\treturn args", "def Args(parser):", "def parse_value(cls, value):\n choice, value = value.split('=')\n value = cls.VALUES_MAP[value]\n\n return choice, value", "def test_parsingValues(self):\n argV = (\"--fooint 912 --foofloat -823.1 \"\n \"--eggint 32 --eggfloat 21\").split()\n 
self.usage.parseOptions(argV)\n self.failUnlessEqual(self.usage.opts['fooint'], 912)\n self.assert_(isinstance(self.usage.opts['fooint'], int))\n self.failUnlessEqual(self.usage.opts['foofloat'], -823.1)\n self.assert_(isinstance(self.usage.opts['foofloat'], float))\n self.failUnlessEqual(self.usage.opts['eggint'], 32)\n self.assert_(isinstance(self.usage.opts['eggint'], int))\n self.failUnlessEqual(self.usage.opts['eggfloat'], 21.)\n self.assert_(isinstance(self.usage.opts['eggfloat'], float))", "def _conversion(self, val):\n if (self.__set_type == \"str\"):\n return val\n else:\n try:\n return ast.literal_eval(val)\n except ValueError:\n return None", "def __arg(self):\n arg = None\n\n # Match optional identifier\n initial_pos = self.pos\n try:\n arg = self.match_type(Identifier)\n except ParseError:\n self.pos = initial_pos\n\n # Match optional '...'\n ellipsis_pos = self.pos\n try:\n punc = self.match_value(Punctuator, \".\")\n self.match_value(Punctuator, \".\")\n self.match_value(Punctuator, \".\")\n if arg is None:\n arg = Identifier(punc.line, punc.col, punc.prev_white, \"...\")\n else:\n arg.token += \"...\"\n except ParseError:\n self.pos = ellipsis_pos\n\n if arg is not None:\n return arg\n raise ParseError(\"Invalid argument\")", "def _parse(self, val):\n if self.type == \"integer\":\n return int(val)\n elif self.type == \"number\":\n return float(val)\n elif self.type == \"boolean\":\n lower_val = str(val).lower()\n if lower_val not in {\"true\", \"false\"}:\n msg = \"Boolean parameter '{}' only accept True/False, got {}.\"\n raise ValidationException(\n message=msg.format(self.name, val),\n no_personal_data_message=msg.format(\"[self.name]\", \"[val]\"),\n error_category=ErrorCategory.USER_ERROR,\n target=ErrorTarget.PIPELINE,\n )\n return True if lower_val == \"true\" else False\n return val", "def _str_validator(arg):\n if arg is None or arg is '' or type(arg) != str:\n raise ValueError('Incorrect value: input should be a string')", "def eval_with_pFunc(string):\n return eval(string)", "def parse(token):\n\n pass", "def parse(self, fstring):\n pass", "def __parse_arg_num(content: str, is_optional: bool, is_flag: bool) -> (ArgNum, int):\n match = __QUANTIFIER_REGEX.match(content)\n\n n = None\n offset = 0\n\n if match is None:\n if is_flag:\n quantifier = Quantifier.FLAG\n elif is_optional:\n quantifier = Quantifier.OPTIONAL\n else:\n quantifier = Quantifier.N\n n = 1\n else:\n body = match.string[:match.end()]\n\n if body == \"...\":\n quantifier = Quantifier.ANY if is_optional else Quantifier.AT_LEAST_ONE\n offset = 3\n elif body == \"*\":\n quantifier = Quantifier.ANY\n offset = 1\n elif body[0] == \"{\":\n try:\n n = int(match.group(1))\n except ValueError as err:\n raise PatternError(f\"bad quantifier: {err}\")\n\n if n == 0:\n quantifier = Quantifier.FLAG\n n = None\n else:\n quantifier = Quantifier.N\n\n offset = match.end()\n else:\n raise PatternError(f\"unknown quantifier found: '{match.string[:match.end()]}\")\n\n if is_optional and (quantifier == Quantifier.N and n != 1):\n raise PatternError(\"optional argument values must only have a Quantifier of 1\")\n\n return ArgNum(quantifier, n), offset", "def parseString(self, s):\n pass", "def str2optional(parser: Callable[[str], Any]) -> Callable[[str], Optional[Any]]:\n\n def _parse(string: str) -> Optional[Any]:\n if string.lower() == \"none\":\n return None\n return parser(string)\n\n return _parse", "def parse_atom_value(atom_value_str: str) -> AtomValue:\n return AtomValue(atom_value_str, 
atom_value_str.isupper())", "def parse_bool(arg):\n if arg == 'True':\n return True\n elif arg == 'False':\n return False\n else:\n raise argparse.ArgumentTypeError(\"Expected 'True' or 'False'.\")", "def get_arg(self, match: str) -> Optional[str]:\n value = self.aug.get(match)\n\n # No need to strip quotes for variables, as apache2ctl already does\n # this, but we do need to strip quotes for all normal arguments.\n\n # Note: normal argument may be a quoted variable\n # e.g. strip now, not later\n if not value:\n return None\n\n value = value.strip(\"'\\\"\")\n\n variables = ApacheParser.arg_var_interpreter.findall(value)\n\n for var in variables:\n # Strip off ${ and }\n try:\n value = value.replace(var, self.variables[var[2:-1]])\n except KeyError:\n raise errors.PluginError(\"Error Parsing variable: %s\" % var)\n\n return value", "def function_argument(self):\n shards = self.line.split()\n if len(shards) > 2:\n if shards[0] == 'function':\n if ':' in shards[-1]:\n return True" ]
[ "0.6599658", "0.65680563", "0.65047354", "0.64847267", "0.6464504", "0.6307335", "0.62872803", "0.60615987", "0.6032944", "0.58954966", "0.58332956", "0.58204687", "0.5797649", "0.5789056", "0.5745152", "0.5726418", "0.57198304", "0.57167447", "0.568619", "0.568619", "0.5685029", "0.568201", "0.5672512", "0.5664595", "0.56627053", "0.5639778", "0.5621604", "0.5621154", "0.5609844", "0.5605723", "0.5597989", "0.55961907", "0.55960953", "0.5580159", "0.5574382", "0.5552925", "0.55323255", "0.5523952", "0.5516075", "0.5491107", "0.54743207", "0.54739237", "0.5468898", "0.54634744", "0.5452221", "0.54469705", "0.54424846", "0.54292583", "0.5428747", "0.54174143", "0.5416046", "0.5406376", "0.5403873", "0.53984445", "0.5396863", "0.53931534", "0.53899914", "0.53749484", "0.5369865", "0.5369714", "0.53530216", "0.5346036", "0.5299817", "0.52966887", "0.52965885", "0.528976", "0.5280217", "0.5268886", "0.5265915", "0.5264121", "0.5263033", "0.5259571", "0.5253507", "0.5250673", "0.5247665", "0.52468383", "0.52447164", "0.52413213", "0.52397627", "0.5235108", "0.5212543", "0.52110124", "0.5208008", "0.5205471", "0.5204832", "0.51863086", "0.51825523", "0.51754814", "0.5160968", "0.5157721", "0.5157687", "0.51525825", "0.5152187", "0.5148861", "0.5147355", "0.5142125", "0.5140341", "0.5138741", "0.51364446", "0.5128704" ]
0.53076184
62
Load certificate info from existing certificate or certificate request.
def maybe_parse(val, parse_func):
def load_from_existing(self, obj):
    self.subject = self.extract_name(obj.subject)
    for ext in obj.extensions:
        crit = ext.critical
        extobj = ext.value
        if ext.oid == ExtensionOID.BASIC_CONSTRAINTS:
            if not crit:
                raise InvalidCertificate("BASIC_CONSTRAINTS must be critical")
            self.ca = extobj.ca
            self.path_length = None
            if self.ca:
                self.path_length = extobj.path_length
        elif ext.oid == ExtensionOID.KEY_USAGE:
            if not crit:
                raise InvalidCertificate("KEY_USAGE must be critical")
            self.usage += self.extract_key_usage(extobj)
        elif ext.oid == ExtensionOID.SUBJECT_ALTERNATIVE_NAME:
            self.san = self.extract_gnames(extobj)
        elif ext.oid == ExtensionOID.EXTENDED_KEY_USAGE:
            self.usage += self.extract_xkey_usage(extobj)
        elif ext.oid == ExtensionOID.AUTHORITY_INFORMATION_ACCESS:
            for ad in extobj:
                if not isinstance(ad.access_location, x509.UniformResourceIdentifier):
                    raise InvalidCertificate("Unsupported access_location: %s" % (ad.access_location,))
                url = as_unicode(ad.access_location.value)
                if ad.access_method == AuthorityInformationAccessOID.CA_ISSUERS:
                    self.issuer_urls.append(url)
                elif ad.access_method == AuthorityInformationAccessOID.OCSP:
                    self.ocsp_urls.append(url)
                else:
                    raise InvalidCertificate("Unsupported access_method: %s" % (ad.access_method,))
        elif ext.oid == ExtensionOID.CRL_DISTRIBUTION_POINTS:
            for dp in extobj:
                if dp.relative_name:
                    raise InvalidCertificate("DistributionPoint.relative_name not supported")
                if dp.crl_issuer:
                    raise InvalidCertificate("DistributionPoint.crl_issuer not supported")
                if dp.reasons:
                    raise InvalidCertificate("DistributionPoint.reasons not supported")
                for gn in self.extract_gnames(dp.full_name):
                    if gn.startswith('uri:'):
                        self.crl_urls.append(gn[4:])
                    else:
                        raise InvalidCertificate("Unsupported DistributionPoint: %s" % (gn,))
        elif ext.oid == ExtensionOID.NAME_CONSTRAINTS:
            self.permit_subtrees = self.extract_gnames(extobj.permitted_subtrees)
            self.exclude_subtrees = self.extract_gnames(extobj.excluded_subtrees)
        elif ext.oid == ExtensionOID.SUBJECT_KEY_IDENTIFIER:
            pass
        elif ext.oid == ExtensionOID.AUTHORITY_KEY_IDENTIFIER:
            pass
        elif ext.oid == ExtensionOID.OCSP_NO_CHECK:
            self.ocsp_nocheck = True
        elif ext.oid == ExtensionOID.TLS_FEATURE:
            for tls_feature_code in extobj:
                if tls_feature_code == x509.TLSFeatureType.status_request:
                    self.ocsp_must_staple = True
                elif tls_feature_code == x509.TLSFeatureType.status_request_v2:
                    self.ocsp_must_staple_v2 = True
                else:
                    raise InvalidCertificate("Unsupported TLSFeature: %r" % (tls_feature_code,))
        else:
            raise InvalidCertificate("Unsupported extension in CSR: %s" % (ext,))
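Below is a minimal sketch of feeding load_from_existing a real object, assuming the cryptography package (x509.load_pem_x509_csr is its actual API); CertInfo and request.pem are hypothetical stand-ins for the class holding the method above. For an already-issued certificate, x509.load_pem_x509_certificate works the same way, since the method only reads obj.subject and obj.extensions, which CSRs and certificates both expose.

from cryptography import x509
from cryptography.hazmat.backends import default_backend

# Parse a PEM-encoded certificate signing request from disk.
with open("request.pem", "rb") as f:   # hypothetical path
    csr = x509.load_pem_x509_csr(f.read(), default_backend())

info = CertInfo()             # hypothetical holder class defining load_from_existing
info.load_from_existing(csr)  # fills subject, ca, san, usage, issuer/ocsp/crl URLs
print(info.subject, info.san)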
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_x509_cert(url, httpc, spec2key, **get_args):\n try:\n r = httpc(\"GET\", url, allow_redirects=True, **get_args)\n if r.status_code == 200:\n cert = str(r.text)\n try:\n public_key = spec2key[cert] # If I've already seen it\n except KeyError:\n public_key = import_public_key_from_pem_data(cert)\n spec2key[cert] = public_key\n\n if isinstance(public_key, rsa.RSAPublicKey):\n return {\"rsa\": public_key}\n elif isinstance(public_key, ec.EllipticCurvePublicKey):\n return {\"ec\": public_key}\n else:\n raise Exception(\"HTTP Get error: %s\" % r.status_code)\n except Exception as err: # not a RSA key\n logger.warning(\"Can't load key: %s\" % err)\n return []", "def _parse_certificate(cls, response):\n links = _parse_header_links(response)\n try:\n cert_chain_uri = links[u'up'][u'url']\n except KeyError:\n cert_chain_uri = None\n return (\n response.content()\n .addCallback(\n lambda body: messages.CertificateResource(\n uri=cls._maybe_location(response),\n cert_chain_uri=cert_chain_uri,\n body=body))\n )", "def fusion_api_get_certificate_info(self, uri=None, api=None, param='', headers=None):\n param = '/certificates/https/'\n return self.ic.get(uri=uri, api=api, headers=headers, param=param)", "def get_certinfo(doc):\n\n #set a two second default timeout to recieve a cert\n socket.setdefaulttimeout(2)\n doc['ssl'] = {} \n\n try:\n cert = ssl.get_server_certificate((doc['hostname'], 443))\n #sometimes certs come back as unicode so cast to str() aka ascii\n cert = M2Crypto.X509.load_cert_string(str(cert))\n\n except:\n syslog.syslog('[*] Failed to get ssl certificate from %s' % doc['hostname'])\n print('[*] Failed to get ssl certificate from %s' % doc['hostname'])\n #lets remove the ssl key and return the doc untouched\n doc.pop('ssl')\n return doc\n\n\n #get creation date\n doc['ssl']['created'] = cert.get_not_before().get_datetime().isoformat()\n #get not valid after, aka expiration data\n doc['ssl']['expire'] = cert.get_not_after().get_datetime().isoformat()\n #get issuer information\n doc['ssl']['issuer'] = cert.get_issuer().as_text()\n #get subject information\n doc['ssl']['subject'] = cert.get_subject().as_text()\n #get keysize, size() returns in bytes, so we multiply * 8 to get the number of bits\n doc['ssl']['keysize'] = cert.get_pubkey().size() * 8\n #get cert fingerprint for comparison\n doc['ssl']['fingerprint'] = cert.get_fingerprint()\n\n return doc", "def get_certificate_request(self, vault_name: str,\n certificate_name: str,\n certificate_version: str) -> dict[str, Any]:\n url = f'https://{vault_name}{self.azure_cloud.suffixes.keyvault_dns}/certificates/{certificate_name}'\n if certificate_version:\n url = url + f'/{certificate_version}'\n response = self.http_request(\n 'GET', full_url=url,\n resource=self.get_vault_resource())\n\n return response", "def fetch_x509_context(self) -> X509Context:", "def getCertificate(self, req):\n return dumpCert(createCert(parseCertReqStr(req), self._cert,\n self._key))", "def request_cert():\n\n api_request = shallow_copy(props)\n\n for key in ['ServiceToken', 'Region', 'Tags', 'Route53RoleArn']:\n api_request.pop(key, None)\n\n if 'ValidationMethod' in props:\n if props['ValidationMethod'] == 'DNS':\n\n # Check that we have all the hosted zone information we need to validate\n # before we create the certificate\n for name in set([props['DomainName']] + props.get('SubjectAlternativeNames', [])):\n get_zone_for(name)\n\n del api_request['DomainValidationOptions']\n\n e['PhysicalResourceId'] = acm.request_certificate(\n 
IdempotencyToken=i_token,\n **api_request\n )['CertificateArn']\n add_tags()", "def load_cert_chain(self, certfile, keyfile: Optional[Any] = ...):\n ...", "def solve(certificate_data: bytes) -> Certificate:\n return { # type: ignore\n Encoding.PEM: load_pem_x509_certificate,\n Encoding.DER: load_der_x509_certificate\n }[real_encoding](certificate_data, default_backend())", "def get_ssl_certificate():", "def cert_info(user, course):\r\n if not course.may_certify():\r\n return {}\r\n\r\n return _cert_info(user, course, certificate_status_for_student(user, course.id))", "def get_ssl_certificate() :", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n certificate: Optional[pulumi.Input[str]] = None,\n certificate_id: Optional[pulumi.Input[str]] = None,\n certificate_name: Optional[pulumi.Input[str]] = None,\n domain: Optional[pulumi.Input[str]] = None,\n instance_id: Optional[pulumi.Input[str]] = None,\n private_key: Optional[pulumi.Input[str]] = None) -> 'Certificate':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _CertificateState.__new__(_CertificateState)\n\n __props__.__dict__[\"certificate\"] = certificate\n __props__.__dict__[\"certificate_id\"] = certificate_id\n __props__.__dict__[\"certificate_name\"] = certificate_name\n __props__.__dict__[\"domain\"] = domain\n __props__.__dict__[\"instance_id\"] = instance_id\n __props__.__dict__[\"private_key\"] = private_key\n return Certificate(resource_name, opts=opts, __props__=__props__)", "def loaded_certificates(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['LoadedCertificateArgs']]]]:\n return pulumi.get(self, \"loaded_certificates\")", "def load_pem_x509_certificate(data):\n return _x509.load_pem_x509_certificate(data, _backends.default_backend())", "def ssl_get_cert_from_request(request):\r\n certkey = \"SSL_CLIENT_S_DN\" # specify the request.META field to use\r\n\r\n cert = request.META.get(certkey, '')\r\n if not cert:\r\n cert = request.META.get('HTTP_' + certkey, '')\r\n if not cert:\r\n try:\r\n # try the direct apache2 SSL key\r\n cert = request._req.subprocess_env.get(certkey, '')\r\n except Exception:\r\n return ''\r\n\r\n return cert", "def pickup_certificate(self):\n return self.__query(\"certificatePickup\", data)", "def Certificate(self) -> _n_8_t_0:", "def Certificate(self) -> _n_8_t_0:", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n certificate: Optional[pulumi.Input[str]] = None,\n certificate_id: Optional[pulumi.Input[int]] = None,\n creation_timestamp: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n expire_time: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n name_prefix: Optional[pulumi.Input[str]] = None,\n private_key: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[str]] = None,\n self_link: Optional[pulumi.Input[str]] = None) -> 'SSLCertificate':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _SSLCertificateState.__new__(_SSLCertificateState)\n\n __props__.__dict__[\"certificate\"] = certificate\n __props__.__dict__[\"certificate_id\"] = certificate_id\n __props__.__dict__[\"creation_timestamp\"] = creation_timestamp\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"expire_time\"] = expire_time\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"name_prefix\"] = name_prefix\n 
__props__.__dict__[\"private_key\"] = private_key\n __props__.__dict__[\"project\"] = project\n __props__.__dict__[\"self_link\"] = self_link\n return SSLCertificate(resource_name, opts=opts, __props__=__props__)", "def fetch_cert(source, entry, s3_client):\n if source == \"s3\":\n bucket_and_key = parse_s3_url(entry)\n logger.info(\"...reading s3 source = {}\".format(bucket_and_key))\n pem_cert = s3_client.get_object(\n Bucket=bucket_and_key[\"bucket\"], Key=bucket_and_key[\"key\"]\n )\n pem_cert_body = pem_cert[\"Body\"].read()\n elif source == \"memory\":\n logger.info(\"...reading from memory\")\n pem_cert_body = entry\n else:\n raise ValueError(\n \"Invalid cert entry type {}, \" \"must be one of s3, memory\".format(source)\n )\n\n # Python3 will return a byte string, Python2 will return a string\n if type(pem_cert_body) == bytes:\n pem_cert_body = pem_cert_body.decode(\"utf-8\")\n\n return pem_cert_body", "def _lazy_read_ca_bundle():\n if len(ROOT_CERTIFICATES_DICT) > 0:\n return\n\n logger = getLogger(__name__)\n try:\n ca_bundle = (os.environ.get('REQUESTS_CA_BUNDLE') or\n os.environ.get('CURL_CA_BUNDLE'))\n if ca_bundle and path.exists(ca_bundle):\n # if the user/application specifies cabundle.\n read_cert_bundle(ca_bundle)\n else:\n import sys\n from botocore.vendored.requests import certs\n if hasattr(certs, '__file__') and \\\n path.exists(certs.__file__) and \\\n path.exists(path.join(\n path.dirname(certs.__file__), 'cacert.pem')):\n # if cacert.pem exists next to certs.py in request pacakage\n ca_bundle = path.join(\n path.dirname(certs.__file__), 'cacert.pem')\n read_cert_bundle(ca_bundle)\n elif hasattr(sys, '_MEIPASS'):\n # if pyinstaller includes cacert.pem\n cabundle_candidates = [\n ['botocore', 'vendored', 'requests', 'cacert.pem'],\n ['requests', 'cacert.pem'],\n ['cacert.pem'],\n ]\n for filename in cabundle_candidates:\n ca_bundle = path.join(sys._MEIPASS, *filename)\n if path.exists(ca_bundle):\n read_cert_bundle(ca_bundle)\n break\n else:\n logger.error('No cabundle file is found in _MEIPASS')\n try:\n import certifi\n read_cert_bundle(certifi.where())\n except:\n logger.debug('no certifi is installed. ignored.')\n\n except Exception as e:\n logger.error('Failed to read ca_bundle: %s', e)\n\n if len(ROOT_CERTIFICATES_DICT) == 0:\n logger.error('No CA bundle file is found in the system. 
'\n 'Set REQUESTS_CA_BUNDLE to the file.')", "def _check_certificate(public_cert_content, priv_key_content,\n domain=None, at_time=None):\n result = {}\n # Read the private key and public certificate\n try:\n priv_key = OpenSSL.crypto.load_privatekey(\n OpenSSL.crypto.FILETYPE_PEM, priv_key_content)\n except OpenSSL.crypto.Error as err:\n result.update({'ssl_certificate_key': {\n 'state': 'invalid', 'detail': str(err)}})\n priv_key = None\n\n try:\n public_cert = OpenSSL.crypto.load_certificate(\n OpenSSL.crypto.FILETYPE_PEM, public_cert_content)\n except OpenSSL.crypto.Error as err:\n result.update({'ssl_certificate': {\n 'state': 'invalid', 'detail': str(err)}})\n public_cert = None\n\n if priv_key and public_cert:\n context = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_METHOD)\n context.use_privatekey(priv_key)\n context.use_certificate(public_cert)\n try:\n context.check_privatekey()\n except OpenSSL.SSL.Error:\n result.update({'ssl_certificate': {'state': 'invalid',\n 'detail': \"certificate does not match private key.\"}})\n\n if result:\n raise RuntimeError(result)\n\n not_after = public_cert.get_notAfter()\n if not isinstance(not_after, six.string_types):\n not_after = not_after.decode('utf-8')\n not_after = datetime.datetime.strptime(not_after, \"%Y%m%d%H%M%SZ\")\n common_name = public_cert.get_subject().commonName\n alt_names = []\n for ext_idx in range(0, public_cert.get_extension_count()):\n extension = public_cert.get_extension(ext_idx)\n if extension.get_short_name().decode('utf-8') == 'subjectAltName':\n # data of the X509 extension, encoded as ASN.1\n decoded_alt_names, _ = asn1_decoder(\n extension.get_data(), asn1Spec=SubjectAltName())\n for alt in nat_encoder(decoded_alt_names):\n alt_name = alt['dNSName'].decode('utf-8')\n if alt_name != common_name:\n alt_names += [alt_name]\n if domain:\n found = False\n for alt_name in [common_name] + alt_names:\n regex = alt_name.replace('.', r'\\.').replace('*', r'.*') + '$'\n if re.match(regex, domain) or alt_name == domain:\n found = True\n break\n if not found:\n result.update({'ssl_certificate': {'state': 'invalid',\n 'detail': \"domain name (%s) does not match common or alt names\"\\\n \" present in certificate (%s, %s).\" % (\n domain, common_name, ','.join(alt_names))}})\n if at_time:\n if not_after <= at_time:\n result.update({'ssl_certificate': {'state': 'invalid',\n 'detail': \"certificate is only valid until %s.\" % not_after}})\n\n if result:\n raise RuntimeError(result)\n\n result.update({'ssl_certificate': {\n 'common_name': common_name,\n 'alt_names': alt_names,\n 'state': result.get('ssl_certificate', {}).get('state', 'valid'),\n 'issuer': public_cert.get_issuer().organizationName,\n 'ends_at': not_after.isoformat()}})\n return result", "def certificate_data(prog):\n retval = Prog.RetVal.ok\n prog.log.info3(\"+++ generating certificate data (hashes)...\")\n for target in prog.target_list:\n uniq = []\n for t in target.tlsa:\n if t.params() in uniq:\n continue\n uniq += [ t.params() ]\n\n prog.log.info3(\n \" ++ tlsa: {}{}{}, request: {}\".format(t.usage, t.selector,\n t.matching,\n target.domain))\n try:\n data = get_data(prog, target.domain, t)\n for d in data:\n prog.log.info3(\n \" + cert: {}\\n + data: {}\".format(d[0], d[1]))\n\n # The only time we _don't_ print this, is if we are\n # printing the log info to stdout and the debug level\n # is 'debug':\n if not (prog.log.type == logging.LogType.stdout\n and prog.log.level == logging.LogLevel.debug):\n print(\"{} {} {} {} {} {}\".format(\n get_domain(prog, d[0]),\n 
t.usage, t.selector, t.matching, d[1], d[0]))\n\n except (Except.FunctionError, Except.InternalError,\n Except.DNSProcessingError) as ex:\n prog.log.error(\"{}: {}\".format(target.domain, ex.message))\n retval = Prog.RetVal.exit_failure\n continue\n\n return retval", "def get_certificate(self, cert_id):\r\n return self.ssl.getObject(id=cert_id)", "def cert(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cert\")", "def cert(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cert\")", "def _extract_values_from_certificate(cert):\n logger = getLogger(__name__)\n # cert and serial number\n data = {\n u'cert': cert,\n u'issuer': cert.get_issuer().der(),\n u'serial_number': cert.get_serial_number(),\n u'algorithm': rfc2437.id_sha1,\n u'algorithm_parameter': univ.Any(hexValue='0500') # magic number\n }\n # DN Hash\n data[u'name'] = cert.get_subject()\n cert_der = data[u'name'].der()\n sha1_hash = hashlib.sha1()\n sha1_hash.update(cert_der)\n data[u'name_hash'] = sha1_hash.hexdigest()\n\n # public key Hash\n data['key_hash'] = _get_pubickey_sha1_hash(cert).hexdigest()\n\n # CRL and OCSP\n data['crl'] = None\n ocsp_uris0 = []\n for idx in range(cert.get_extension_count()):\n e = cert.get_extension(idx)\n if e.get_short_name() == b'authorityInfoAccess':\n for line in str(e).split(u\"\\n\"):\n m = OCSP_RE.match(line)\n if m:\n logger.debug(u'OCSP URL: %s', m.group(1))\n ocsp_uris0.append(m.group(1))\n elif e.get_short_name() == b'crlDistributionPoints':\n for line in str(e).split(u\"\\n\"):\n m = CRL_RE.match(line)\n if m:\n logger.debug(u\"CRL: %s\", m.group(1))\n data['crl'] = m.group(1)\n\n if len(ocsp_uris0) == 1:\n data['ocsp_uri'] = ocsp_uris0[0]\n elif len(ocsp_uris0) == 0:\n data['ocsp_uri'] = u''\n else:\n raise OperationalError(\n msg=u'More than one OCSP URI entries are specified in '\n u'the certificate',\n errno=ER_FAILED_TO_GET_OCSP_URI,\n )\n data[u'is_root_ca'] = cert.get_subject() == cert.get_issuer()\n return data", "def test_use_certificate(self, ctx_or_conn):\n # TODO\n # Hard to assert anything. But we could set a privatekey then ask\n # OpenSSL if the cert and key agree using check_privatekey. 
Then as\n # long as check_privatekey works right we're good...\n ctx_or_conn.use_certificate(\n load_certificate(FILETYPE_PEM, root_cert_pem)\n )", "def _load_ssl_certificate(self) -> ssl.SSLContext:\n\n sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)\n sslcontext.load_cert_chain(\n path.join(path.dirname(__file__), '..', '..', 'player.crt'),\n path.join(path.dirname(__file__), '..', '..', 'player.key')\n )\n\n return sslcontext", "def get_own_cert(self):\n# _log.debug(\"get_own_cert: node_name={}\".format(self.node_name))\n try:\n certpath = self.get_own_cert_path()\n st_cert = open(certpath, 'rt').read()\n cert_part = st_cert.split(BEGIN_CRT_LINE)\n certstr = \"{}{}\".format(BEGIN_CRT_LINE, cert_part[1])\n cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM,\n certstr)\n _log.debug(\"get_own_cert\"\n \"\\n\\tcertpath={}\".format(certpath))\n #Check that the certificate parameters are the same as our attributes\n if not certificate.cert_O(certstring=certstr) == self.domain:\n _log.error(\"Domain does not match certificate\")\n raise Exception(\"Domain does not match certificate\")\n if not certificate.cert_CN(certstring=certstr) == self.node_name:\n _log.error(\"Node name does not match certificate\")\n raise Exception(\"Node name does not match certificate\")\n if not certificate.cert_DN_Qualifier(certstring=certstr) == self.node_id:\n _log.error(\"Node ID does not match certificate\")\n raise Exception(\"Node ID does not match certificate\")\n return certpath, cert, certstr\n except Exception as err:\n # Certificate not available\n _log.debug(\"No runtime certificate can be found, err={}\".format(err))\n return None, None, None", "def certificate_auth():\r\n url = 'https://www.12306.cn'\r\n response = requests.get(url, verify=False)\r\n print(response.status_code)\r\n print(response.text)", "def get_x509_certificate_by_name(certs, key_name):\n for cert in certs['certificates']:\n if cert['key_name'] == key_name:\n return cert['x509_certificate_pem']\n raise CertificateError('Certificate \\'%s\\' not found' % key_name)", "def get_certificate(self, cert_name, callback=None):\n # TODO: get certificate from DHT (alternative to getting from disk).\n# _log.debug(\"get_certificate:\\n\\tmy_node_name={}\\n\\tcert_name={}\\n\\tcallback={}\".format(self.node_name, cert_name, callback))\n try:\n cert = self.get_certificate_locally(cert_name)\n if cert and callback:\n callback(certstring=cert)\n elif cert:\n return cert\n else:\n try:\n self.node.storage.get_index(['certificate',cert_name],\n cb=CalvinCB(self._get_certificate_from_storage_cb,\n callback=callback))\n except Exception as err:\n _log.debug(\"Certificate could not be found in storage, err={}\".format(err))\n raise\n except Exception as err:\n _log.debug(\"Failed searching for certificate locally, cert_name={}, err={}\".format(cert_name, err))", "def certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate\")", "def add_certificate(self, certificate):\r\n return 
self.ssl.createObject(certificate)", "def load_cert(file, format=FORMAT_PEM):\n bio = BIO.openfile(file)\n if format == FORMAT_PEM:\n return load_cert_bio(bio)\n elif format == FORMAT_DER:\n cptr = m2.d2i_x509(bio._ptr())\n if cptr is None:\n raise X509Error(Err.get_error())\n return X509(cptr, _pyfree=1)\n else:\n raise ValueError(\"Unknown format. Must be either FORMAT_DER or FORMAT_PEM\")", "def load_key_and_cert(key_file, cert_file):\n with open(cert_file, 'rb') as f:\n cert = x509.load_pem_x509_certificate(f.read(), default_backend())\n with open(key_file, 'rb') as f:\n key = serialization.load_pem_private_key(f.read(), None, backend=default_backend())\n\n return key, cert", "def _get_cached_certificate_with_key(self, cache_id):\n with stats.timer('get_cached_certificate_with_key'):\n item = self.cache.get(cache_id)\n # We're the first thread attempting to get this certificate\n if not item:\n return {}\n # A certificate hasn't been issued yet, but since the cache id\n # exists, another thread has requested the certificate.\n if not item.response and item.lock:\n raise CertificateNotReadyError()\n # If the other thread failed to get the certificate, we need to\n # ensure that this thread attempts to fetch a certificate.\n return item.response", "def DescribeCertificateDetail(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"DescribeCertificateDetail\", params, headers=headers)\n response = json.loads(body)\n model = models.DescribeCertificateDetailResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))", "def __init__(self, cert_string=None, cert_file=None, key_string=None, key_file=None, passphrase=None):\n self._context = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_METHOD)\n\n if cert_file:\n # we have to load certificate for equality check. there is no\n # other way to obtain certificate from context.\n with open(cert_file, 'rb') as fp:\n cert_string = fp.read()\n\n cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, cert_string)\n self._context.use_certificate(cert)\n\n if not key_string and not key_file:\n # OpenSSL is smart enought to locate private key in certificate\n args = [OpenSSL.crypto.FILETYPE_PEM, cert_string]\n if passphrase is not None:\n args.append(passphrase)\n\n pk = OpenSSL.crypto.load_privatekey(*args)\n self._context.use_privatekey(pk)\n elif key_file and not passphrase:\n self._context.use_privatekey_file(key_file, OpenSSL.crypto.FILETYPE_PEM)\n\n else:\n if key_file:\n # key file is provided with passphrase. 
context.use_privatekey_file\n # does not use passphrase, so we have to load the key file manually.\n with open(key_file, 'rb') as fp:\n key_string = fp.read()\n\n args = [OpenSSL.crypto.FILETYPE_PEM, key_string]\n if passphrase is not None:\n args.append(passphrase)\n\n pk = OpenSSL.crypto.load_privatekey(*args)\n self._context.use_privatekey(pk)\n\n # check if we are not passed some garbage\n self._context.check_privatekey()\n\n # used to compare certificates.\n self._equality = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, cert)", "def certificate_check(self, certificate, valid, host):\n\n raise Passthrough", "def get_certificate_policy_request(self, vault_name: str, certificate_name: str) -> dict[str, Any]:\n url = f'https://{vault_name}{self.azure_cloud.suffixes.keyvault_dns}/certificates/{certificate_name}/policy'\n response = self.http_request(\n 'GET', full_url=url, resource=self.get_vault_resource())\n\n return response", "def _try_load_ca_cert(path):\n crt = crypto.load_certificate(crypto.FILETYPE_PEM,\n open(path, 'rb').read())\n if crt.has_expired():\n raise ValueError('CA certificate has expired.')\n if crt.get_signature_algorithm() in ('md5', 'sha1'):\n raise ValueError('CA certificate signed with MD5 or SHA1.')\n return crt", "def _cert_info(user, course, cert_status):\r\n # simplify the status for the template using this lookup table\r\n template_state = {\r\n CertificateStatuses.generating: 'generating',\r\n CertificateStatuses.regenerating: 'generating',\r\n CertificateStatuses.downloadable: 'ready',\r\n CertificateStatuses.notpassing: 'notpassing',\r\n CertificateStatuses.restricted: 'restricted',\r\n }\r\n\r\n default_status = 'processing'\r\n\r\n default_info = {'status': default_status,\r\n 'show_disabled_download_button': False,\r\n 'show_download_url': False,\r\n 'show_survey_button': False,\r\n }\r\n\r\n if cert_status is None:\r\n return default_info\r\n\r\n status = template_state.get(cert_status['status'], default_status)\r\n\r\n d = {'status': status,\r\n 'show_download_url': status == 'ready',\r\n 'show_disabled_download_button': status == 'generating',\r\n 'mode': cert_status.get('mode', None)}\r\n\r\n if (status in ('generating', 'ready', 'notpassing', 'restricted') and\r\n course.end_of_course_survey_url is not None):\r\n d.update({\r\n 'show_survey_button': True,\r\n 'survey_url': process_survey_link(course.end_of_course_survey_url, user)})\r\n else:\r\n d['show_survey_button'] = False\r\n\r\n if status == 'ready':\r\n if 'download_url' not in cert_status:\r\n log.warning(\"User %s has a downloadable cert for %s, but no download url\",\r\n user.username, course.id)\r\n return default_info\r\n else:\r\n d['download_url'] = cert_status['download_url']\r\n\r\n if status in ('generating', 'ready', 'notpassing', 'restricted'):\r\n if 'grade' not in cert_status:\r\n # Note: as of 11/20/2012, we know there are students in this state-- cs169.1x,\r\n # who need to be regraded (we weren't tracking 'notpassing' at first).\r\n # We can add a log.warning here once we think it shouldn't happen.\r\n return default_info\r\n else:\r\n d['grade'] = cert_status['grade']\r\n\r\n return d", "def load_certificate(file_path: str, encoding: Encoding = None) -> Certificate:\n real_encoding = encoding or _get_encoding_type(file_path)\n\n def solve(certificate_data: bytes) -> Certificate:\n \"\"\"Determine the type of data and perform loading based on data type.\n\n :param certificate_data: given certificate data\n :return: loaded certificate\n \"\"\"\n return { # 
type: ignore\n Encoding.PEM: load_pem_x509_certificate,\n Encoding.DER: load_der_x509_certificate\n }[real_encoding](certificate_data, default_backend())\n\n return generic_load(file_path, solve)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n arn: Optional[pulumi.Input[str]] = None,\n certificate_body: Optional[pulumi.Input[str]] = None,\n certificate_chain: Optional[pulumi.Input[str]] = None,\n expiration: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n name_prefix: Optional[pulumi.Input[str]] = None,\n path: Optional[pulumi.Input[str]] = None,\n private_key: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n upload_date: Optional[pulumi.Input[str]] = None) -> 'ServerCertificate':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _ServerCertificateState.__new__(_ServerCertificateState)\n\n __props__.__dict__[\"arn\"] = arn\n __props__.__dict__[\"certificate_body\"] = certificate_body\n __props__.__dict__[\"certificate_chain\"] = certificate_chain\n __props__.__dict__[\"expiration\"] = expiration\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"name_prefix\"] = name_prefix\n __props__.__dict__[\"path\"] = path\n __props__.__dict__[\"private_key\"] = private_key\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"tags_all\"] = tags_all\n __props__.__dict__[\"upload_date\"] = upload_date\n return ServerCertificate(resource_name, opts=opts, __props__=__props__)", "async def get_certificate_version(\n self, certificate_name: str, version: str, **kwargs\n ) -> KeyVaultCertificate:\n bundle = await self._client.get_certificate(\n vault_base_url=self.vault_url,\n certificate_name=certificate_name,\n certificate_version=version,\n **kwargs\n )\n return KeyVaultCertificate._from_certificate_bundle(certificate_bundle=bundle)", "def test_get_certificate_none(self):\n context = Context(SSLv23_METHOD)\n client = Connection(context, None)\n cert = client.get_certificate()\n assert cert is None", "def fusion_api_get_ca_certificate(self, uri=None, api=None, headers=None, param=''):\n return self.ca.get(uri=uri, api=api, headers=headers, param=param)", "def dcos_ca_bundle():\n resp = sdk_cmd.cluster_request('GET', '/ca/dcos-ca.crt')\n cert = resp.content.decode('ascii')\n assert cert is not None\n return cert", "def _get_ca_bundle():\n try:\n import certifi\n return certifi.where()\n except ImportError:\n pass", "def replace_certificate(self):\n return self.__query(\"certificateReplace\", data)", "def get_certificate(self, url):\n bearer = 'Authorization: Bearer '+str(self.exchanged_token).split('\\n', 1)[0]\n data = json.dumps({\"service_id\": \"x509\"})\n\n headers = StringIO()\n buffers = StringIO()\n\n c = pycurl.Curl()\n c.setopt(pycurl.URL, url)\n c.setopt(pycurl.HTTPHEADER, [bearer, 'Content-Type: application/json'])\n c.setopt(pycurl.POST, 1)\n c.setopt(pycurl.POSTFIELDS, data)\n c.setopt(c.WRITEFUNCTION, buffers.write)\n c.setopt(c.HEADERFUNCTION, headers.write)\n c.setopt(c.VERBOSE, True)\n\n try:\n c.perform()\n status = c.getinfo(c.RESPONSE_CODE)\n c.close()\n body = buffers.getvalue()\n\n if str(status) != \"303\" :\n self.log.error(\"On \\\"get redirect curl\\\": %s , http error: %s \" % (body, str(status)))\n return False \n except pycurl.error, error:\n errno, errstr = error\n self.log.info('An error 
occurred: %s' % errstr)\n return False\n \n redirect = self.tts\n for item in headers.getvalue().split(\"\\n\"):\n if \"location\" in item:\n redirect = redirect + item.strip().replace(\"location: \", \"\")\n\n headers = {'Authorization': 'Bearer ' + self.exchanged_token.strip()}\n response = requests.get(redirect, headers=headers)\n\n try:\n response.raise_for_status()\n except requests.exceptions.HTTPError as e:\n # Whoops it wasn't a 200\n self.log.error(\"get_certificate() Error: %s \" %str(e))\n return False\n\n with open('/tmp/output.json', 'w') as outf:\n outf.write(response.content)\n else:\n self.log.error(\"No location in redirect response\")\n\n return True", "def test_cert_request(self):\n oim = OIM()\n rc, _, _, msg = oim.request('--hostname', 'test.' + DOMAIN)\n self.assertEqual(rc, 0, \"Failed to request certificate\\n%s\" % msg)\n self.assert_(oim.reqid != '', msg)", "def test_get_certificate(self):\n chain = _create_certificate_chain()\n [(cakey, cacert), (ikey, icert), (skey, scert)] = chain\n\n context = Context(SSLv23_METHOD)\n context.use_certificate(scert)\n client = Connection(context, None)\n cert = client.get_certificate()\n assert cert is not None\n assert \"Server Certificate\" == cert.get_subject().CN", "def certificate(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"certificate\")", "def ModifyCertificate(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"ModifyCertificate\", params, headers=headers)\n response = json.loads(body)\n model = models.ModifyCertificateResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))", "def create_ssl_cert_request ( ssl_hostnames ) :\n first_hostname = ssl_hostnames[ 0 ]\n csr_filename = get_ssl_csr_filename( first_hostname )\n key_filename = get_ssl_key_filename( first_hostname )\n openssl_cnf = \"\"\"\n[req]\ndistinguished_name = req_distinguished_name\nreq_extensions = san_ext\n\n[req_distinguished_name]\ncountryName_default = US\nstateOrProvinceName_default = New York\nlocalityName_default = New York\norganizationalUnitName_default = Home Box Office, Inc\ncommonName_default = \"\"\" + first_hostname + \"\"\"\n\n[san_ext]\nbasicConstraints = CA:FALSE\nkeyUsage = nonRepudiation, digitalSignature, keyEncipherment\nsubjectAltName = @sans\n\n[sans]\n\"\"\"\n counter = 0\n for hostname in ssl_hostnames :\n counter += 1\n openssl_cnf += 'DNS.' 
+ str( counter ) + ' = ' + hostname + '\\n'\n\n with open( first_hostname, 'w' ) as f :\n f.write( openssl_cnf )\n cmd = 'openssl req -new -newkey rsa:2048 -nodes -out ' + csr_filename + ' -keyout ' + key_filename\n cmd += ' -config ' + first_hostname + ' -subj \"/C=US/ST=New York/L=New York/O=Home Box Office Inc/CN=' + first_hostname + '\"'\n keygen = subprocess.call( cmd, shell = True )\n os.remove( first_hostname )\n if keygen != 0 :\n print \"Generation of SSL request failed!\"\n return None\n\n return { 'csr-filename' : csr_filename, 'key-filename' : key_filename }", "def __init__(__self__, *,\n certificate: Optional[pulumi.Input[str]] = None,\n certificate_id: Optional[pulumi.Input[str]] = None,\n certificate_name: Optional[pulumi.Input[str]] = None,\n domain: Optional[pulumi.Input[str]] = None,\n instance_id: Optional[pulumi.Input[str]] = None,\n private_key: Optional[pulumi.Input[str]] = None):\n if certificate is not None:\n pulumi.set(__self__, \"certificate\", certificate)\n if certificate_id is not None:\n pulumi.set(__self__, \"certificate_id\", certificate_id)\n if certificate_name is not None:\n pulumi.set(__self__, \"certificate_name\", certificate_name)\n if domain is not None:\n pulumi.set(__self__, \"domain\", domain)\n if instance_id is not None:\n pulumi.set(__self__, \"instance_id\", instance_id)\n if private_key is not None:\n pulumi.set(__self__, \"private_key\", private_key)", "def fusion_api_get_client_certificate(self, ip, api=None, headers=None):\n return self.client_certificate.get(ip, api, headers)", "def parse_certificate(data):\n\n if not isinstance(data, byte_cls):\n raise TypeError(pretty_message(\n '''\n data must be a byte string, not %s\n ''',\n type_name(data)\n ))\n\n key_type = None\n\n # Appears to be PEM formatted\n if re.match(b'\\\\s*-----', data) is not None:\n key_type, _, data = _unarmor_pem(data)\n\n if key_type == 'private key':\n raise ValueError(pretty_message(\n '''\n The data specified does not appear to be a certificate, but\n rather a private key\n '''\n ))\n\n if key_type == 'public key':\n raise ValueError(pretty_message(\n '''\n The data specified does not appear to be a certificate, but\n rather a public key\n '''\n ))\n\n if key_type is None or key_type == 'certificate':\n try:\n return Certificate.load(data)\n except (ValueError):\n pass # Data was not a Certificate\n\n raise ValueError(pretty_message(\n '''\n The data specified does not appear to be a known certificate format\n '''\n ))", "def info_from_args(args):\n return CertInfo(\n subject=parse_dn(args.subject),\n usage=parse_list(args.usage),\n alt_names=parse_list(args.san),\n ocsp_nocheck=args.ocsp_nocheck,\n ocsp_must_staple=args.ocsp_must_staple,\n ocsp_must_staple_v2=args.ocsp_must_staple_v2,\n ocsp_urls=parse_list(args.ocsp_urls),\n crl_urls=parse_list(args.crl_urls),\n issuer_urls=parse_list(args.issuer_urls),\n permit_subtrees=parse_list(args.permit_subtrees),\n exclude_subtrees=parse_list(args.exclude_subtrees),\n ca=args.CA,\n path_length=args.path_length)", "def _ParseCertificateArguments(client, args):\n self_managed = None\n managed = None\n certificate_type = None\n if args.certificate:\n certificate_type = \\\n client.messages.SslCertificate.TypeValueValuesEnum.SELF_MANAGED\n certificate = files.ReadFileContents(args.certificate)\n private_key = files.ReadFileContents(args.private_key)\n self_managed = client.messages.SslCertificateSelfManagedSslCertificate(\n certificate=certificate, privateKey=private_key)\n if args.domains:\n certificate_type = \\\n 
client.messages.SslCertificate.TypeValueValuesEnum.MANAGED\n managed = client.messages.SslCertificateManagedSslCertificate(\n domains=args.domains)\n return certificate_type, self_managed, managed", "def load_cert(file):\n with open(file, \"r\") as pemfile:\n cert_content = pemfile.read()\n cert_stripped = \"\".join(\n [line for line in cert_content.splitlines() if \"CERTIFICATE\" not in line])\n\n logging.info('Loaded certificate from {}'.format(file))\n return cert_stripped", "def request_cert(session, domain_name, validation_domain):\n if session is None:\n return None\n\n client = session.client('acm')\n validation_options = [\n {\n 'DomainName': domain_name,\n 'ValidationDomain': validation_domain\n },\n ]\n response = client.request_certificate(DomainName=domain_name,\n DomainValidationOptions=validation_options)\n return response", "def ssl(self, cainfo=None, verify=True, cert=None, key=None):\n if cainfo:\n self.curl.setopt(pycurl.CAINFO, cainfo)\n\n if verify == False:\n self.curl.setopt(pycurl.SSL_VERIFYPEER, 0)\n self.curl.setopt(pycurl.SSL_VERIFYHOST, 0)\n else:\n self.curl.setopt(pycurl.SSL_VERIFYPEER, 1)\n self.curl.setopt(pycurl.SSL_VERIFYHOST, 2)\n if cert:\n #self.curl.setopt(pycurl.SSLCERTTYPE, \"DER\")\n self.curl.setopt(pycurl.SSLCERT, cert)\n if key:\n self.curl.setopt(pycurl.SSLKEY, key)", "async def get_certificate(self, certificate_name: str, **kwargs) -> KeyVaultCertificate:\n bundle = await self._client.get_certificate(\n vault_base_url=self.vault_url,\n certificate_name=certificate_name,\n certificate_version=\"\",\n **kwargs\n )\n return KeyVaultCertificate._from_certificate_bundle(certificate_bundle=bundle)", "def course_certificate(\n self, request, uuid, *args, **kwargs\n ): # pylint: disable=unused-argument\n # Try to fetch a certificate by the uuid passed in the URL\n try:\n certificate = CourseRunCertificate.objects.get(uuid=uuid)\n except CourseRunCertificate.DoesNotExist:\n raise Http404()\n\n # Get a CertificatePage to serve this request\n certificate_page = (\n certificate.course_run.course.page.certificate_page\n if certificate.course_run.course.page\n else None\n )\n if not certificate_page:\n raise Http404()\n\n certificate_page.certificate = certificate\n return certificate_page.serve(request)", "def get(\n self, certificate_name, resource_group_name, provisioning_service_name, if_match=None, custom_headers=None, raw=False, **operation_config):\n # Construct URL\n url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{provisioningServiceName}/certificates/{certificateName}'\n path_format_arguments = {\n 'certificateName': self._serialize.url(\"certificate_name\", certificate_name, 'str'),\n 'subscriptionId': self._serialize.url(\"self.config.subscription_id\", self.config.subscription_id, 'str'),\n 'resourceGroupName': self._serialize.url(\"resource_group_name\", resource_group_name, 'str'),\n 'provisioningServiceName': self._serialize.url(\"provisioning_service_name\", provisioning_service_name, 'str')\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {}\n query_parameters['api-version'] = self._serialize.query(\"self.api_version\", self.api_version, 'str')\n\n # Construct headers\n header_parameters = {}\n header_parameters['Content-Type'] = 'application/json; charset=utf-8'\n if self.config.generate_client_request_id:\n header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())\n if custom_headers:\n 
header_parameters.update(custom_headers)\n if if_match is not None:\n header_parameters['If-Match'] = self._serialize.header(\"if_match\", if_match, 'str')\n if self.config.accept_language is not None:\n header_parameters['accept-language'] = self._serialize.header(\"self.config.accept_language\", self.config.accept_language, 'str')\n\n # Construct and send request\n request = self._client.get(url, query_parameters)\n response = self._client.send(request, header_parameters, stream=False, **operation_config)\n\n if response.status_code not in [200]:\n raise models.ErrorDetailsException(self._deserialize, response)\n\n deserialized = None\n\n if response.status_code == 200:\n deserialized = self._deserialize('CertificateResponse', response)\n\n if raw:\n client_raw_response = ClientRawResponse(deserialized, response)\n return client_raw_response\n\n return deserialized", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n certificate: Optional[pulumi.Input[str]] = None,\n csr: Optional[pulumi.Input[str]] = None,\n expires_on: Optional[pulumi.Input[str]] = None,\n hostnames: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n min_days_for_renewal: Optional[pulumi.Input[int]] = None,\n request_type: Optional[pulumi.Input[str]] = None,\n requested_validity: Optional[pulumi.Input[int]] = None) -> 'OriginCaCertificate':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _OriginCaCertificateState.__new__(_OriginCaCertificateState)\n\n __props__.__dict__[\"certificate\"] = certificate\n __props__.__dict__[\"csr\"] = csr\n __props__.__dict__[\"expires_on\"] = expires_on\n __props__.__dict__[\"hostnames\"] = hostnames\n __props__.__dict__[\"min_days_for_renewal\"] = min_days_for_renewal\n __props__.__dict__[\"request_type\"] = request_type\n __props__.__dict__[\"requested_validity\"] = requested_validity\n return OriginCaCertificate(resource_name, opts=opts, __props__=__props__)", "def cert(self, value):\n self._cert = value", "def load(cls, cert_path: Union[Path, str], key_path: Union[Path, str]) -> \"CertificateAuthority\":\n cert_path, key_path = Path(cert_path), Path(key_path)\n\n with cert_path.open(\"rb\") as file:\n cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, file.read())\n\n with key_path.open(\"rb\") as file:\n key = OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_PEM, file.read())\n\n return cls(key, cert)", "def _check_ca_certificate(self):\n if not os.path.exists(self._ca_certificate_path):\n with open(self._ca_certificate_path, \"w\") as f:\n f.write(ssl.get_server_certificate((\"127.0.0.1\", self._app_port), ssl_version=ssl.PROTOCOL_TLSv1_2))", "def find_certificate(p): # find_certificate(props, /)\n\n for page in acm.get_paginator('list_certificates').paginate():\n for certificate in page['CertificateSummaryList']:\n log_info(certificate)\n\n if p['DomainName'].lower() == certificate['DomainName']:\n tags = {tag['Key']: tag['Value'] for tag in\n acm.list_tags_for_certificate(**{'CertificateArn': certificate['CertificateArn']})['Tags']}\n\n if (tags.get('cloudformation:' + 'logical-id') == e['LogicalResourceId'] and\n tags.get('cloudformation:' + 'stack-id') == e['StackId'] and\n tags.get('cloudformation:' + 'properties') == hash_func(p)\n ):\n return certificate['CertificateArn']", "def initial_setup():\n\n if os.path.exists(cfg.ca_private_key_path()):\n pkey = _try_load_ca_private_key(cfg.ca_private_key_path())\n else:\n pkey = 
_generate_ca_private_key(cfg.ca_private_key_path())\n\n if os.path.exists(cfg.ca_cert_path()):\n _try_load_ca_cert(cfg.ca_cert_path())\n else:\n _generate_ca_cert(cfg.ca_cert_path(), pkey)", "def cert(self):\n return self._cert", "def load_cert_string(string, format=FORMAT_PEM):\n bio = BIO.MemoryBuffer(string)\n return load_cert_bio(bio, format)", "def get_certificate(self, path: Union[bytes, str]) -> str:\n path = _to_bytes_or_null(path)\n certificate = ffi.new(\"char **\")\n ret = lib.Fapi_GetCertificate(self._ctx, path, certificate)\n _chkrc(ret)\n # certificate is guaranteed to be a null-terminated string\n return ffi.string(_get_dptr(certificate, lib.Fapi_Free)).decode()", "async def import_certificate(\n self, certificate_name: str, certificate_bytes: bytes, **kwargs\n ) -> KeyVaultCertificate:\n\n enabled = kwargs.pop(\"enabled\", None)\n policy = kwargs.pop(\"policy\", None)\n\n if enabled is not None:\n attributes = self._models.CertificateAttributes(enabled=enabled)\n else:\n attributes = None\n base64_encoded_certificate = base64.b64encode(certificate_bytes).decode(\"utf-8\")\n\n parameters = self._models.CertificateImportParameters(\n base64_encoded_certificate=base64_encoded_certificate,\n password=kwargs.pop(\"password\", None),\n certificate_policy=policy._to_certificate_policy_bundle() if policy else None,\n certificate_attributes=attributes,\n tags=kwargs.pop(\"tags\", None),\n )\n\n bundle = await self._client.import_certificate(\n vault_base_url=self.vault_url,\n certificate_name=certificate_name,\n parameters=parameters,\n **kwargs\n )\n return KeyVaultCertificate._from_certificate_bundle(certificate_bundle=bundle)", "def get_certificate_from_file(file_path):\n LOG.debug(\"extracting information of certificate in %s\" % file_path)\n try:\n with open(file_path, 'rb') as file_data:\n file_data.seek(0, os.SEEK_SET)\n read_file = file_data.read()\n certificate = extract_certs_from_pem(read_file)[0]\n except Exception as e:\n LOG.warning(\"No certificate was extracted from file %s\"\n \"due to %s\" % (file_path, e))\n return None\n return certificate", "def get_cert_content(certificate):\n cert_object = crypto.load_certificate(crypto.FILETYPE_PEM, certificate)\n cert_content = crypto.dump_certificate(crypto.FILETYPE_TEXT, cert_object)\n return cert_content", "def _validate_cert(self):\r\n cert = self.handle.getpeercert()\r\n self.peercert = cert\r\n if 'subject' not in cert:\r\n raise TTransportException(type=TTransportException.NOT_OPEN,\r\n message='No SSL certificate found from %s:%s' % (self.host, self.port))\r\n fields = cert['subject']\r\n for field in fields:\r\n # ensure structure we get back is what we expect\r\n if not isinstance(field, tuple):\r\n continue\r\n cert_pair = field[0]\r\n if len(cert_pair) < 2:\r\n continue\r\n cert_key, cert_value = cert_pair[0:2]\r\n if cert_key != 'commonName':\r\n continue\r\n certhost = cert_value\r\n if certhost == self.host:\r\n # success, cert commonName matches desired hostname\r\n self.is_valid = True\r\n return \r\n else:\r\n raise TTransportException(type=TTransportException.UNKNOWN,\r\n message='Host name we connected to \"%s\" doesn\\'t match certificate provided commonName \"%s\"' % (self.host, certhost))\r\n raise TTransportException(type=TTransportException.UNKNOWN,\r\n message='Could not validate SSL certificate from host \"%s\". 
Cert=%s' % (self.host, cert))", "def CreateCertificate(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"CreateCertificate\", params, headers=headers)\n response = json.loads(body)\n model = models.CreateCertificateResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))", "def get_certificate_locally(self, cert_name):\n #TODO: this should be made asynchronous as it reads from filessystem\n _log.debug(\"get_certificate_locally:\\n\\tmy_node_name={}\\n\\tcert_name={}\\n\\t\".format(self.node_name, cert_name))\n if cert_name == self.node_id:\n _log.debug(\"Look for runtimes own certificate {} in {{mine}} folder, err={}\".format(cert_name, err))\n try:\n# certpath = self.get_own_cert_path()\n# self.certificate.truststore_transport.verify_certificate_from_path(certpath)\n# with open(certpath, 'rb') as fd:\n# certstr=fd.read()\n return self.cert_str\n except Exception as err:\n _log.debug(\"Certificate {} is not in {{mine}} folder, return None, err={}\".format(cert_name, err))\n return None\n else:\n if cert_name in self.cert_dict:\n return self.cert_dict[cert_name]\n else:\n try:\n _log.debug(\"Look for certificate in others folder, cert_name={}\".format(cert_name))\n # Check if the certificate is in the 'others' folder for runtime my_node_name.\n files = os.listdir(os.path.join(self.runtime_dir, \"others\"))\n matching = [s for s in files if cert_name in s]\n certpath = os.path.join(self.runtime_dir, \"others\", matching[0])\n self.certificate.truststore_transport.verify_certificate_from_path(certpath)\n with open(certpath, 'rb') as fd:\n certstr=fd.read()\n #TODO: some cleaning of self.cert_dict is probably a good idea\n self.cert_dict[cert_name]=certstr\n return certstr\n except Exception as err:\n _log.debug(\"Certificate {} is not in {{others}} folder, return None, err={}\".format(cert_name, err))\n return None", "def fusion_api_get_remote_certificate(self, ip, api=None, headers=None):\n return self.remote_certificate.get(ip, api, headers)", "def __init__(__self__,\n resource_name: str,\n args: CertificateArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def test_source_from_url_certificate(self):\n with patch(\"sprinter.lib.cleaned_request\") as cleaned_request:\n mock = Mock(spec=Response)\n mock.text = old_manifest\n cleaned_request.return_value = mock\n TEST_URI = \"https://testme.com/test.cfg\"\n load_manifest(TEST_URI, verify_certificate=False)\n cleaned_request.assert_called_with(\"get\", TEST_URI, verify=False)", "def request_certificate(request):\r\n if request.method == \"POST\":\r\n if request.user.is_authenticated():\r\n xqci = XQueueCertInterface()\r\n username = request.user.username\r\n student = User.objects.get(username=username)\r\n course_key = SlashSeparatedCourseKey.from_deprecated_string(request.POST.get('course_id'))\r\n course = modulestore().get_course(course_key, depth=2)\r\n\r\n status = certificate_status_for_student(student, course_key)['status']\r\n if status in [CertificateStatuses.unavailable, CertificateStatuses.notpassing, CertificateStatuses.error]:\r\n logger.info('Grading and certification requested for user {} in course {} via /request_certificate call'.format(username, course_key))\r\n status = xqci.add_cert(student, course_key, course=course)\r\n return HttpResponse(json.dumps({'add_status': status}), 
mimetype='application/json')\r\n return HttpResponse(json.dumps({'add_status': 'ERRORANONYMOUSUSER'}), mimetype='application/json')", "def new_X509(self, host: str) -> Tuple[OpenSSL.crypto.X509, OpenSSL.crypto.PKey]: # pylint: disable=invalid-name\n\n # Generate a new key pair.\n key = new_RSA()\n\n # Generates new X509Request.\n req = OpenSSL.crypto.X509Req()\n req.get_subject().CN = host.encode(\"utf-8\")\n req.set_pubkey(key)\n req.sign(key, \"sha256\")\n\n # Generates new X509 certificate.\n cert = new_X509(common_name=host)\n cert.set_issuer(self.cert.get_subject())\n cert.set_pubkey(req.get_pubkey())\n\n # Sets the certificate 'subjectAltName' extension.\n hosts = [f\"DNS:{host}\"]\n\n if is_ip(host):\n hosts += [f\"IP:{host}\"]\n else:\n hosts += [f\"DNS:*.{host}\"]\n\n hosts = \", \".join(hosts).encode(\"utf-8\")\n cert.add_extensions([OpenSSL.crypto.X509Extension(b\"subjectAltName\", False, hosts)])\n\n # Signs the certificate with the CA's key.\n cert.sign(self.key, \"sha256\")\n\n return cert, key", "def test_use_certificate_uninitialized(self, ctx_or_conn):\n with pytest.raises(Error):\n ctx_or_conn.use_certificate(X509())", "def load_cert_bio(bio, format=FORMAT_PEM):\n if format == FORMAT_PEM:\n cptr = m2.x509_read_pem(bio._ptr())\n elif format == FORMAT_DER:\n cptr = m2.d2i_x509(bio._ptr())\n else:\n raise ValueError(\"Unknown format. Must be either FORMAT_DER or FORMAT_PEM\")\n if cptr is None:\n raise X509Error(Err.get_error())\n return X509(cptr, _pyfree=1)", "def test_get_certificate_by_id(self):\n self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n response = self.client.get(\n '/api/v1/certificates/1', content_type='application/json',\n headers=self.get_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Certificate retrieved successfully')\n assert response.status_code == 200" ]
[ "0.6680185", "0.63726574", "0.63267094", "0.6285823", "0.6274304", "0.62612367", "0.6248355", "0.6233824", "0.62313014", "0.61848396", "0.6153183", "0.6125429", "0.61140054", "0.59716755", "0.59682465", "0.5947671", "0.58962196", "0.58797115", "0.5856209", "0.5856209", "0.585408", "0.5787375", "0.57101405", "0.5704164", "0.5688496", "0.56597394", "0.56583077", "0.56583077", "0.56150645", "0.5587778", "0.55606", "0.5556594", "0.5539577", "0.55300695", "0.55180925", "0.5489227", "0.5489227", "0.5489227", "0.5489227", "0.5489227", "0.5489227", "0.5486368", "0.5479544", "0.54734725", "0.5464233", "0.54378957", "0.5418159", "0.5404553", "0.5392408", "0.5368225", "0.53497916", "0.53358155", "0.53208876", "0.5318726", "0.5316908", "0.53152114", "0.531244", "0.53113633", "0.530672", "0.5287503", "0.52868235", "0.5284717", "0.528172", "0.528172", "0.52778536", "0.52612597", "0.52509266", "0.52487713", "0.5242473", "0.5235791", "0.5231843", "0.52284163", "0.5224676", "0.52226025", "0.5211217", "0.5209863", "0.5204019", "0.5197975", "0.51964164", "0.51843166", "0.5173358", "0.51647156", "0.516315", "0.5159956", "0.51541674", "0.51414764", "0.5140275", "0.51296306", "0.51156145", "0.5115181", "0.51143295", "0.51138306", "0.5109051", "0.51067245", "0.5103109", "0.5101429", "0.50907856", "0.5081999", "0.507903", "0.50733566" ]
0.5833314
21
Walk oid list, return keywords.
def extract_xkey_usage(self, ext):
    oidmap = {v: k for k, v in XKU_CODE_TO_OID.items()}
    res = []
    for oid in ext:
        if oid in oidmap:
            res.append(oidmap[oid])
        else:
            raise InvalidCertificate("Unsupported ExtendedKeyUsage oid: %s" % (oid,))
    return res
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def extract_keywords(self):\n keywords = [] \n for keyword in self.watsonLanguageModel['keywords'][:self.entitySizeLimit]: \n keywords.append(keyword['text'])\n return keywords", "def get_keywords(self):\r\n\t\treturn list(self.keyword_headlines().keys())", "def extract_keywords(raw_text,id):\n\n print(\"Extracting keywords for \"+id)\n\n stemmer = nltk.PorterStemmer()\n\n # Construct text\n\n # Tokens\n tokens = nltk.word_tokenize(raw_text)\n # filter undesirable words and format\n words = [w.replace('\\'','') for w in tokens if len(w)>=3]\n text = nltk.Text(words)\n\n tagged_text = nltk.pos_tag(text)\n #nouns = [tg[0] for tg in tagged_text if tg[1]=='NN' or tg[1]=='NNP' ]\n #print(nouns)\n\n # multi-term\n multiterms = set()\n stem_dico = {}\n for i in range(len(tagged_text)) :\n # max length 4 for multi-terms ==> 3\n for l in range(1,4) :\n if i+l < len(tagged_text) :\n tags = [tagged_text[k] for k in range(i,i+l)]\n if potential_multi_term(tags) :\n multistemlist = [str.lower(stemmer.stem(tagged_text[k][0])) for k in range(i,i+l)]\n #multistem.sort(key=str.lower)\n\t\t #python 3 : remove .encode('ascii','ignore')\n multistem = functools.reduce(lambda s1,s2 : s1+' '+s2,multistemlist)\n rawtext = functools.reduce(lambda s1,s2 : s1+' '+s2,[str.lower(tagged_text[k][0]) for k in range(i,i+l)])\n multiterms.add(multistem)\n if multistem in stem_dico :\n stem_dico[multistem].add(rawtext)\n else :\n stem_dico[multistem] = set([rawtext])\n\n return [list(multiterms),stem_dico]", "def findall(ctx):\n _check_for_commands(ctx.obj[\"keep_path\"])\n keep = ctx.obj[\"keep\"]\n results = {}\n for kw, command_ids in keep[\"keyword2Ids\"].items():\n results[kw] = []\n for command_id in command_ids:\n command = keep[\"id2Command\"][str(command_id)]\n explanation = keep[\"id2Explanation\"][str(command_id)]\n results[kw].append({ \n \"id\": command_id,\n \"command\": command,\n \"explanation\": explanation\n })\n _show_results(results)", "def _generate_keywords(self):\n _keywords = [*self._lookup_opcodes_dir.keys(), *self._registers_list.keys()]\n for key in _keywords:\n self._keywords.extend(key.split(\" \"))\n return", "def index_pdir_keywords_IEntry(object, **kw):\n out = []\n for name in object.aq_parent.filter_fields:\n if getattr(object, name, None) is None:\n continue\n elif hasattr(getattr(object, name), \"__iter__\"):\n for v in getattr(object, name):\n out.append(\"%s:%s\" % (name, v))\n else:\n out.append(\"%s:%s\" % (name, getattr(object, name)))\n return out", "def add_keywords(self, response: Response) -> list:\n return response.xpath(\"//ul[@class='term']/li/a/text()\").getall()", "def get_keywords(seq):\r\n if len(seq) = 0:\r\n return None\r\n freqs = {}\r\n for w in seq: \r\n if w not in freqs:\r\n\t freqs[w] = 1\r\n\telse\r\n\t freqs[w] += 1\r\n num_keys = len(freqs)\r\n res = []\r\n \r\n return res", "def keyword_extraction(file_content):\n\n # [question, question....]\n for key, value in file_content.items():\n seg, hidden = ltp.seg([key])\n # ner: [[('Nh', 2, 2)]]\n ner = ltp.ner(hidden)\n # keywords: [('PERSON', \"吴轩\")], tuple_item: ('Nh', 2, 2)\n keywords = [(tag_to_name[tuple_item[0]], to_string(seg[0][tuple_item[1]: tuple_item[2]+1])) for tuple_item in ner[0]]\n file_content[key].keywords = keywords\n\n return file_content", "def __get_keywords(self, text_list):\r\n specialKW = [\r\n 'run keyword',\r\n 'run keyword and continue on failure',\r\n 'run keyword and expect error',\r\n 'run keyword and ignore error',\r\n 'run keyword and return'\r\n 'run keyword and return if',\r\n 
'run keyword and return status',\r\n 'run keyword if',\r\n 'run keyword if all critical tests passed',\r\n 'run keyword if all tests passed',\r\n 'run keyword if any critical tests failed',\r\n 'run keyword if any tests failed',\r\n 'run keyword if test failed',\r\n 'run keyword if test passed',\r\n 'run keyword if timeout occurred',\r\n 'run keyword unless',\r\n 'run keywords',\r\n 'wait until keyword succeeds',\r\n 'repeat keyword',\r\n 'else'\r\n ]\r\n specialSettings = [\r\n '[Arguments]',\r\n '[Documentation]'\r\n ]\r\n L = []\r\n if text_list[0] in specialSettings:\r\n return L\r\n for item in text_list:\r\n if self.__is_keyword(item):\r\n L.append(item)\r\n if not item.replace('_', ' ').replace('-', ' ').lower() in specialKW:\r\n break\r\n return L", "def get_keywords(self, sectioned_text):\n \n keywords = []\n \n if 'full text' in list(sectioned_text.keys()):\n \n for word in self.keyword_list:\n if word in sectioned_text['full text']:\n keywords.append(word)\n \n else: \n fulltext = self.restitch_text(sectioned_text)\n for word in self.keyword_list:\n if word in fulltext:\n keywords.append(word)\n \n return keywords", "def list_keywords(self, **kwargs) -> ApiResponse:\n return self._request(kwargs.pop('path'), params=kwargs)", "def iteritems(self, keyword):", "def get_keywords(keyword_list: List[Tuple[str, str]], keyword_type: str) -> List[str]:\n keywords = [x[0] for x in keyword_list if x[1].startswith(keyword_type)]\n\n return keywords", "def walk(self, oid):\n oid = str_to_oid(oid)\n result = []\n walker = Walker(self, oid,\n use_bulk=self.settings_read[\"use_bulk\"],\n bulk_rows=self.settings_read[\"bulk_rows\"])\n for rows in walker:\n result.extend(rows)\n return result", "def test_caom_instrument_keywords():\n kw = []\n for ins in JWST_INSTRUMENTS:\n kw.append(mm.instrument_keywords(ins, caom=True)['keyword'].tolist())\n\n assert kw[0] == kw[1] == kw[2] == kw[3] == kw[4]", "def keywords(self, **kwargs):\n\n path = self._get_movie_id_path('keywords')\n resp = self._get_method(path, kwargs)\n return resp", "def get_keywords(self):\n all_keywords = []\n z_index = 0\n for zettel in self.lemma_tokens:\n keywords = []\n w_index = 0\n cur_zettel_dict = {}\n for word in zettel:\n cur_zettel_dict.setdefault(word[0], 0)\n cur_word_total_score = self.all_scores[z_index][w_index]\n if cur_zettel_dict[word[0]] > cur_word_total_score:\n w_index += 1\n continue\n else:\n cur_zettel_dict[word[0]] = cur_word_total_score\n w_index += 1\n cur_sorted = sorted(cur_zettel_dict.items(), key=lambda kv: kv[1], reverse=True)\n for i in range(self.keyword_n):\n keywords.append(str(cur_sorted[i]))\n z_index += 1\n all_keywords.append(keywords)\n return all_keywords", "def __iter__(self):\n for keyword in self.meta.findall(CN('meta:keyword')):\n yield keyword.text", "def get_keywords_for_movie(url):\n pass", "def get_ids(self, text):\n\n tokens = [token.orth for token in self.tokenizer(text)]\n ids = []\n for token in tokens:\n try:\n id = self.vocab.vectors.key2row[token]\n except KeyError:\n id = self.oov_id\n\n ids.append(id)\n\n return ids", "def get_keywords(source_or_file):\n tree = get_ast(source_or_file)\n lister = KeywordLister().visit(tree)\n return lister.data", "def list_all_tags(self,obs):", "def _get_vocab_id_list(self, json_obj):\n return json_obj", "def set_keyword_map(self):\n \n ret = defaultdict(list)\n for idx, doc in enumerate(self.docs):\n for token in doc:\n if token in self.dictionary.token2id:\n ret[token].append(idx)\n \n self.keyword_map = ret\n return ret", "def 
scan_individual_identifiers(text: str, cpf: bool = True) -> List[str]:\n if cpf:\n regex = re.compile(r\"\\w{3}\\.\\w{3}\\.\\w{3}\\-\\w{2}\")\n else:\n regex = re.compile(r\"\\w{2}\\.\\w{3}\\.\\w{3}/\\w{4}\\-\\w{2}\")\n\n identifiers = re.findall(regex, text)\n return identifiers", "def get_ids(self, text):\n\n tokens = [token.orth for token in self.tokenizer(text)]\n ids = []\n for token in tokens:\n try:\n id = self._vocab.vectors.key2row[token]\n except KeyError:\n id = self.oov_id\n\n ids.append(id)\n\n return ids", "def find_keywords(anchor, keywords=['']):\n rel_keywords = []\n href, content = parse_anchor(anchor)\n \n for keyword in keywords:\n kw = keyword.lower()\n if kw in href.lower() or kw in content.lower():\n rel_keywords.append(keyword)\n \n return rel_keywords", "def get_keywords():\n \n #get all movies from db\n movies_df = movie_helper.get_movies_df() \n \n with tqdm(total=len(movies_df)) as pbar:\n for index, row in movies_df.iterrows(): \n \n #if imbdid exists use it to look up the API\n if (row['imdbId']):\n \n #get list of keywords and created delimted string\n movie = ia.get_movie(str(row['imdbId']), info='keywords')\n try:\n keywords = \",\".join(movie['keywords'])\n except:\n keywords = None\n \n #update the movies table in the db\n database_helper.update_data(\"movies\", update_params = {\"keywords\" : keywords}, select_params = {\"movieId\" : row[\"movieId\"]})\n pbar.update(1)", "def get_all_keywords(resource):\n keywords = []\n resource.populate()\n for res in [i for i in resource.imports.data if isinstance(i, robot.parsing.settings.Resource)]:\n keyword_file = os.path.abspath('{}/{}'.format(res.directory, res.name))\n if keyword_file not in processed:\n res_obj = ResourceFile(keyword_file)\n processed[keyword_file] = res_obj\n keywords += get_all_keywords(res_obj)\n for keyword in resource.keywords:\n print(keyword.name)\n keywords.append(tuple((keyword.source, keyword.name, keyword.args.value if keyword.args.value else [])))\n return keywords", "def _find_entities(tagged_data, retrieval_data):\n ret = list()\n assert len(tagged_data) == retrieval_data.shape[0], \"Please make sure that tagged data and retrieval data entries align for ID tagging.\"\n ids = retrieval_data.original_id.tolist()\n for i, (sentence, entity_dict) in enumerate(tagged_data):\n ignore_indices, relevant_tokens, relevant_contexts = list(), list(), list()\n ignore_substrings = \" \".join([sentence[start_idx:end_idx] for [start_idx, end_idx, type_ent] in entity_dict['entities'] if type_ent in ignore_ents])\n doc = nlp(sentence)\n #print(\"sentence:\", sentence)\n #print(\"ignoreing substrings:\", ignore_substrings)\n #print(\"found nouns:\")\n for j, token in enumerate(doc):\n if token.tag_.startswith(\"N\") and not token.text in ignore_substrings:#noun which is not ignored\n #print(\"token:\", token.text)\n #print(\"token dep type:\", token.dep_)\n #print(\"token head text:\", token.head.text)\n #print(\"token head pos\", token.head.pos_)\n #print(\"children:\", [child for child in token.children])\n relevant_tokens.append(token.text)\n window_left = min(0, j-2)\n window_right = min(j+3, len(doc))\n relevant_contexts.append(\" \".join([token.text for token in doc[window_left:window_right]]))\n ret.append([ids[i], sentence, relevant_tokens, relevant_contexts])\n with open(\"output/untagged_nouns.pkl\", \"wb\") as outf:\n pickle.dump(ret, outf)\n return ret", "def extract_features(data, stopwords=STOPWORDS):\n tags = set()\n docs = []\n for document in data:\n doc_data = dict()\n 
doc_data['pmid'] = document['sourceid']\n text = document['text']\n\n # Insert PubTator annotations inside abstract\n denotations = document['denotations']\n sorted_denotations = []\n for denotation in denotations:\n begin = denotation['span']['begin']\n end = denotation['span']['end']\n obj = denotation['obj']\n for c in punctuation:\n obj = obj.replace(c, '')\n tags.add(obj)\n doc_data[obj] = doc_data.get(obj,0)+1\n sorted_denotations.append([begin,end,obj])\n sorted_denotations.sort()\n sorted_denotations.reverse()\n for begin, end, obj in sorted_denotations:\n text = text[:begin] + obj + ' ' + text[end:]\n\n doc_data['text'] = clean_text(text, stopwords)\n docs.append(doc_data)\n\n return docs", "def get_keywords(self, number=10):\n keyword = []\n node_weight = OrderedDict(sorted(self.node_weight.items(), key=lambda t: t[1], reverse=True))\n for i, (key, value) in enumerate(node_weight.items()):\n # print(key + ' - ' + str(value))\n keyword.append(key)\n if i > number:\n break\n return keyword", "def get_keywords(self):\n keys = []\n for post in self:\n keys.extend(post.Keywords)\n return list(sorted(set(keys)))", "def _keyword_search(id_to_text, raw_keywords, modified_keywords):\n\t# The raw keywords and modified keywords should be two paired lists where the elements correspond to one another.\n\t# The modifications done to the keywords should already match the modifications done to the texts in the input dictionary so they can be directly compared.\n\tassert len(raw_keywords) == len(modified_keywords)\n\tid_to_found_keywords = {i:[r_kw for r_kw,m_kw in zip(raw_keywords,modified_keywords) if m_kw in text] for i,text in id_to_text.items()}\n\tid_to_num_found_keywords = {i:len(kw_list) for i,kw_list in id_to_found_keywords.items()}\n\treturn(id_to_found_keywords, id_to_num_found_keywords)", "def list_of_identifiers(data, type):\n identifiers = []\n for id_data in data:\n if id_data.get('type') == type:\n identifiers.append(id_data.get('value'))\n return identifiers", "def Keywords(self, default=[{}]):\n tmp = self.data.get('metadata', {}).get('keywords', default)\n return [HEP.KeywordObject(i) for i in tmp]", "def _extract_terms(self, obj):\r\n terms = set()\r\n if 'paths' in obj:\r\n for path in obj['paths']:\r\n segs = re.split('[/{}]', path)\r\n for seg in segs:\r\n terms.add(seg.lower())\r\n self.terms = terms", "def GetIDs(con, cur, ontList):\n ontids = []\n try:\n sqlStr = \"SELECT id from ontologyTable WHERE (description='%s')\" % ontList[0]\n idx = 0\n while idx < len(ontList):\n sqlStr += \" OR (description='%s')\" % ontList[idx]\n idx = idx + 1\n\n cur.execute(sqlStr)\n if cur.rowcount == 0:\n debug(2, 'Failed to get list of terms')\n else:\n res = cur.fetchall()\n for cres in res:\n ontids.append(res[0])\n\n debug(3, \"Number of ontology ids (out of %d)\" % (len(ontids)))\n return \"\", ontids\n\n except psycopg2.DatabaseError as e:\n debug(7, 'database error %s' % e)\n return \"database error %s\" % e, None", "def search_keyword_in_list(keyword, input_list):\n\n match_list = []\n for element in input_list:\n if element.__name__ == keyword:\n if WarriorCliClass.mock or WarriorCliClass.sim:\n if element.__dict__.get(\"mockready\") is None:\n pNote_level(\"The selected keyword {} isn't supported in trial mode\".format(element.__name__), \"ERROR\")\n else:\n pNote_level(\"Keyword {} is being mocked\".format(element.__name__), \"INFO\")\n match_list.append(element)\n else:\n match_list.append(element)\n return match_list", "def extract_keywords(article_list, n=10):\n vectorizer = 
TfidfVectorizer()\n tfidf = vectorizer.fit_transform(article_list)\n words = vectorizer.get_feature_names()\n # check N > total_words_length or not\n maxn = tfidf.shape[1] if tfidf.shape[1] < n else n\n weights = tfidf.toarray()\n # sort by decrease order\n indices = map(lambda w: np.argsort(-w)[:maxn], weights)\n keywords = [list(map(lambda i: words[i], indy)) for indy in indices]\n return keywords", "def _find_ids(self,\r\n data_list,\r\n prop,\r\n lookup_index,\r\n lookup_doc_type,\r\n lookup_field):\r\n lg = logging.getLogger(\"%s.%s\" % (self.ln, inspect.stack()[0][3]))\r\n lg.setLevel(self.log_level)\r\n\r\n rtn_list = []\r\n first_time = IsFirst()\r\n for item in data_list:\r\n # the Dot class will retive and set dictionary values via dot\r\n # notation\r\n val = Dot(item).get(prop)\r\n if val.startswith(\"#;lookup#;\"):\r\n lookup_val = val.replace(\"#;lookup#;\", \"\")\r\n lookup_obj = self.get_item(lookup_val, lookup_field)\r\n if first_time.first():\r\n lg.debug(\" lookup_obj:\\n%s\", pp.pformat(lookup_obj))\r\n if lookup_obj:\r\n rtn_list.append(Dot(item).set(prop, lookup_obj['_id']))\r\n return rtn_list", "def _get_keywords(self, title: str):\n # Prepare data\n keywords = set()\n stops = set(nltk.corpus.stopwords.words(\"english\"))\n stemmer = nltk.stem.SnowballStemmer(\"english\")\n ent_types = [\n \"PERSON\", \"ORGANIZATION\", \"FACILITY\", \"LOCATION\", \"DATE\",\n \"TIME\", \"GPE\", \"MONEY\",\n ]\n excluded_word_types = [\"RB\", \"IN\", \"PRP\"]\n\n # Tokenize and chunk words using NLTK\n tokens = nltk.tokenize.word_tokenize(title)\n positions = nltk.pos_tag(tokens)\n chunk = nltk.ne_chunk(positions)\n\n # Make a word list of keywords we want to add, that\n # are not part of our excluded word types.\n words = set()\n for pos in positions:\n word, word_type = pos\n if word.isalnum() and word_type not in excluded_word_types:\n words.add(word)\n\n # Add all entities to keyword list and remove them from\n # our remaining word set so they don't get added again\n # and stemmed later.\n for subtree in chunk.subtrees(filter=lambda t: t.label() in ent_types):\n for leaf in subtree.leaves():\n keywords.add(leaf[0])\n if leaf[0] in words:\n words.remove(leaf[0])\n\n # Add remaining words in list and stem them to base form,\n # stemming means we change words from e.g. 
\"eating\" to \"eat\".\n for word in words:\n if word not in stops:\n keywords.add(stemmer.stem(word))\n\n return sorted([keyword.lower() for keyword in keywords])", "def id(self, word):\n result = []\n for dic_name in self.dictionaries.keys():\n result += self.dictionaries[dic_name].id(word)\n return result", "def Find(self, children, sink):\n\n tkns = [];\n for child in children:\n key = child.word;\n if not child.word: key = child.tag;\n tkns.append(key);\n self.FindFromTokens(tkns, sink);", "def get_meta_keywords(self):\n return self.get_meta_content(self.article.doc, \"meta[name=keywords]\")", "def getKeywords(self):\n return", "def determine_keywords(self):\n\n split = dict()\n split['email_cc'] = re.compile(\"^\\s*CC[-_]?MAIL[:=]\\s*(.*)\")\n split['email_cc2'] = re.compile(\"^\\s*C[Cc][:=]\\s*(.*)\")\n split['fixed_in'] = re.compile(\"^\\s*FIXED[-_]?IN[:=]\\s*(.*)\")\n\n numeric = dict()\n numeric['bug_fixed'] = re.compile(\"^\\s*(?:BUGS?|FEATURE)[:=]\\s*(.+)\")\n numeric['bug_cc'] = re.compile(\"^\\s*CCBUGS?[:=]\\s*(.+)\")\n\n presence = dict()\n presence['email_gui'] = re.compile(\"^\\s*GUI:\")\n presence['silent'] = re.compile(\"(?:CVS|SVN|GIT|SCM).?SILENT\")\n presence['notes'] = re.compile(\"(?:Notes added by 'git notes add'|Notes removed by 'git notes remove')\")\n\n results = defaultdict(list)\n for line in self.commit.message.split(\"\\n\"):\n # If our line starts with Summary: (as it does when using Arcanist's default template) then strip this off\n # This allows for people to fill keywords in the Differential Summary and have this work smoothly for them\n line = re.sub(\"^Summary: (.+)\", \"\\g<1>\", line)\n\n # Start processing our keywords...\n for (name, regex) in split.iteritems():\n match = re.match( regex, line )\n if match:\n results[name] += [result.strip() for result in match.group(1).split(\",\")]\n\n for (name, regex) in numeric.iteritems():\n match = re.match( regex, line )\n if match:\n results[name] += re.findall(\"(\\d{1,10})\", match.group(1))\n\n for (name, regex) in presence.iteritems():\n if re.match( regex, line ):\n results[name] = True\n\n self.keywords = results", "def parse_keywords(medline):\n keyword_list = medline.find(\"KeywordList\")\n keywords = list()\n if keyword_list is not None:\n for k in keyword_list.findall(\"Keyword\"):\n if k.text is not None:\n keywords.append(k.text)\n keywords = \"; \".join(keywords)\n else:\n keywords = \"\"\n return keywords", "def get_paper_keywords(tree):\n\tpath = '//table/tr/th[text() = \"Keywords:\"]/following-sibling::td/text()'\n\tkeywords = tree.xpath(path)\n\t# xpath returns a list with the keywords as a single string element separated by new lines, commas or semi-colons\n\t# Make this into a list of keywords\n\tif keywords:\n\t\t# Split on new lines, commas and semi-colons\n\t\tkeywords = re.split('[\\\\n,;]', keywords[0])\n\t\t# Remove trailing white space and empty strings\n\t\tkeywords = [kw.strip() for kw in keywords if kw]\n\n\treturn keywords", "def outputids2words(id_list, vocab, article_oovs):\n\twords = []\n\tfor i in id_list:\n\t\ttry:\n\t\t\tw = vocab.id2word(i) # might be [UNK]\n\t\texcept ValueError as e: # w is OOV\n\t\t\tassert article_oovs is not None, \"Error: model produced a word ID that isn't in the vocabulary. 
This should not happen in baseline (no pointer-generator) mode\"\n\t\t\tarticle_oov_idx = i - vocab.size()\n\t\t\ttry:\n\t\t\t\tw = article_oovs[article_oov_idx]\n\t\t\texcept ValueError as e: # i doesn't correspond to an article oov\n\t\t\t\traise ValueError('Error: model produced word ID %i which corresponds to article OOV %i but this example only has %i article OOVs' % (i, article_oov_idx, len(article_oovs)))\n\t\twords.append(w)\n\treturn words", "def test_filtered_instrument_keywords():\n kw = []\n for ins in JWST_INSTRUMENTS:\n kw.append(mm.instrument_keywords(ins, caom=False)['keyword'].tolist())\n\n assert kw[0] != kw[1] != kw[2] != kw[3] != kw[4]", "def tag_lyrics(data):\r\n\r\n # Initialize list to store tagged lyrics\r\n tagged_documents = []\r\n\r\n # Tag lyrics for all the lyrics in the list\r\n for i, doc in enumerate(data):\r\n\r\n # Tag lyrics\r\n tagged = TaggedDocument(doc, [i])\r\n\r\n # Append tagged lyrics to\r\n tagged_documents.append(tagged)\r\n\r\n return tagged_documents", "def get_headlines_with_keyword(self, kw):\r\n\t\tkey_head = self.keyword_headlines()\r\n\r\n\t\theadlines = set()\r\n\r\n\t\tfor headlinekw in key_head[kw]:\r\n\t\t\tcontent = headlinekw.headlineid.content\r\n\t\t\theadlines.add(content)\r\n\r\n\t\treturn list(headlines)", "def get_keywords(text):\n tokens = [word.lower() for word in word_tokenize(text)]\n\n # tag words as verb, noun etc\n tagged_words = pos_tag(tokens)\n\n # retrieve list of boring words from file\n stopwords_file = os.path.join(BASE_DIR, 'data', 'stopwords.txt')\n with open(stopwords_file, 'r', encoding='utf-8') as f:\n stopwords = [line.rstrip(linesep) for line in f]\n \n #We don't want keywords to contain anything in this list\n forbidden = ['.',',',';',':','?','!','+',')','(','[',']','/','<','>','\"','©','1','2','3','4','5','6','7','8','9','0']\n\n # NLTK Chunking - detects noun phrases and phrases of form verb noun or adj noun\n patterns = \"\"\"NP: {<JJ>*<NN><NNS>}\n {<JJR><NNS>}\n {<JJ>*<NNS>}\n {<NN><NNS>} \n {<JJ><NNS>}\n {<JJ>*<NN>*}\n {<NN>*}\n {<NNS>*}\"\"\"\n chunker = RegexpParser(patterns)\n chunks = chunker.parse(tagged_words)\n\n #these are the phrases we want, as lists within a list\n validphrases = []\n for t in chunks.subtrees():\n if t.label() == 'NP':\n validphrases.append([x for x,y in t.leaves()])\n\n #turning lists within lists into actual noun phrases i.e [[radiation], [breast,cancer]] becomes [radiation, breast cancer]\n #sorry for my horrible code\n #trees suck\n lemmatizables = []\n for sublist in validphrases:\n lemmatizables.append(' '.join(sublist))\n\n lemmatizer = WordNetLemmatizer()\n lems = [lemmatizer.lemmatize(x) for x in lemmatizables]\n\n #removing stopwords after lemmatizinga, then removing anything containing punctuation or a number\n lems = filter(lambda lem: lem not in stopwords, lems)\n lems = filter(lambda lem: not any(char in lem for char in forbidden), lems)\n\n return tuple(lems)", "def keywords(self):\n return list(self._kw)", "def m_dump_all_identities():\n\n m_dump_identities(\"ala\")", "def get_article_keywords(article,\n keywords,\n preprocess_type=PreprocessWordType.LEMMATIZE):\n matches = set()\n for word in article.words:\n preprocessed_word = query_utils.preprocess_word(word,\n preprocess_type)\n if preprocessed_word in keywords:\n matches.add(preprocessed_word)\n return sorted(list(matches))", "def gen_text_list(ids):\n return [[tag.lemma for tag in para if (tag.pos not in TT_EXCLUDED_TAGS)]\n for dm in dm_from_id(ids) for para in dm.get_text_tags()]", "def find(ctx, 
keyword):\n _check_for_commands(ctx.obj[\"keep_path\"])\n clean_keywords = [x.encode(\"utf8\").strip() for x in keyword]\n keep = ctx.obj[\"keep\"]\n results = {}\n for kw in clean_keywords:\n if kw in keep[\"keyword2Ids\"]:\n result = []\n command_ids = keep[\"keyword2Ids\"][kw]\n for command_id in command_ids:\n result.append({ \n \"command\": keep[\"id2Command\"][str(command_id)],\n \"explanation\": keep[\"id2Explanation\"][str(command_id)],\n \"id\": command_id\n })\n results[kw] = result\n _show_results(results)", "def keywords(text:str) -> list:\n return sorted(set(text.split(' ')), key=frequency, reverse=True)[0:5]", "def get_ids(self, sentence):\n return [self.get_id(word) for word in sentence.strip().split(' ')]", "def get_keywords_for_component(component, user_defined_keywords):\n output_keywords = []\n input_keywords = user_defined_keywords # initialize with the user defined keywords\n input_keywords += component.split('/') # split the component if there are multiple terms involved\n for input_keyword in input_keywords:\n output_keywords.append(input_keyword)\n word_list_split_by_space = input_keyword.split(' ')\n for word in extract_words_from_word_list_split_by_space(word_list_split_by_space):\n output_keywords.append(word)\n output_keywords += get_synonyms(word)\n output_keywords = list(set(output_keywords))\n return output_keywords", "def dehydrate_keywords(self, bundle):\n return map(str, bundle.obj.keywords.all())", "def build_list(self, word_list):\n # Get frequency list for keys\n freq = word_list.groupby('key').agg('count')\n # Filter out only keys with greater or equal frequency to length\n key_list = freq.loc[freq['word'] >= freq.index.str.len()]\n return key_list", "def get_meta_keywords(self, article):\r\n return self.get_meta_content(article.doc, \"meta[name=keywords]\")", "def getmentioningobjs(idfindex, idfobject):\n idf, edges = eppystuff.an_idfedges(idfindex)\n mentioningobjs = idf_helpers.getanymentions(idf, idfobject)\n keys = [mentioningobj.key for mentioningobj in mentioningobjs] \n objnames = [mentioningobj.obj[1] for mentioningobj in mentioningobjs] \n idfkeys = idf_helpers.idfobjectkeys(idf)\n keysobjsindexes = [(idfkeys.index(mentioningobj.key.upper()), \n idf.idfobjects[mentioningobj.key.upper()].index(mentioningobj))\n for mentioningobj in mentioningobjs] \n urls = [\"../../%s/%s\" % (idfkey, objkey) \n for idfkey, objkey in keysobjsindexes]\n urllinks = ['<a href=%s>%s</a>' % (url, name) \n for url, name in zip(urls, objnames)]\n lines = [\"%s->%s\" % (mentioningobj.key, urllink) \n for mentioningobj, urllink in zip(mentioningobjs, urllinks)]\n return ', '.join(lines)", "def getNoteTagNames(self, authenticationToken, guid):\r\n pass", "def get_dictionary_file_lines_for_keywords(self):\n keywords_iter = iter(self.keywords)\n next_keyword = keywords_iter.next()\n print(\"Searching for keyword {}\".format(next_keyword))\n\n self.dictionary_file.open_handle()\n result_lines = list()\n while next_keyword:\n line = self.dictionary_file.read_line_to_obj()\n if not line:\n print(\"Reached end of dictionary file\")\n break\n\n if line.term < next_keyword:\n continue\n elif line.term == next_keyword:\n print(\"Found postings list for term {}\".format(next_keyword))\n result_lines.append(line)\n\n try:\n next_keyword = keywords_iter.next()\n print(\"Searching for keyword {}\".format(next_keyword))\n except StopIteration:\n print(\"Finished searching for all keywords\")\n break\n\n return result_lines", "def get_objects_id(claims: Dict) -> List:\n ids = set()\n 
for prop in claims:\n for claim in claims[prop]:\n try:\n datatype = claim['mainsnak']['datavalue']['type']\n if datatype == \"wikibase-entityid\":\n d_id = claim['mainsnak']['datavalue']['value']['id']\n ids.add(d_id)\n elif datatype == \"quantity\":\n d_id = claim['mainsnak']['datavalue']['value']['unit'].split(\"/\")[-1]\n ids.add(d_id)\n else:\n continue\n except:\n traceback.print_exc()\n\n return list(ids)", "def getKeywords(tmdbKeywords):\n \n words = []\n if \"keywords\" in tmdbKeywords:\n for keyword in tmdbKeywords[\"keywords\"]:\n words += _format(keyword[\"name\"]).split()\n else:\n raise AttributeError(\"%s instance has no attribute keywords\" % tmdbKeywords) \n return words", "def get_objective_id_terms(self):\n return # osid.search.terms.IdTerm", "def get_objective_id_terms(self):\n return # osid.search.terms.IdTerm", "def get_objective_id_terms(self):\n return # osid.search.terms.IdTerm", "def main():\n records = get_block_of_records({\"keyword\": \"food\"})\n print (\"returned items: {}\".format(len(records)))\n\n processed_records = {}\n for item in records:\n meta = item[\"meta\"]\n umm = item[\"umm\"]\n cid = meta[\"concept-id\"]\n short_name = umm[\"ShortName\"]\n processed_records[cid] = short_name\n\n print (\"uniq keys: {}\".format(len(processed_records.keys())))", "def GetKeywords(self):\n return [FS_COMMANDS, FS_STDLIB, FS_FUNC, FS_CLASS]", "def line_2_words(wordid_list, id2word):\n word_list = []\n for word_id in wordid_list:\n word_list.append(id2word[word_id])\n return word_list", "def keywords(self):\n return self._keywords", "def keywords(self):\n return self._keywords", "def keywords(self):\n return {\n \"unary\": {\n k: v[0] for k, v in self.unary_commands.items()\n },\n \"terminal\": {\n k: v[0] for k, v in self.terminal_commands.items()\n },\n \"binary\": {\n k: v[0] for k, v in self.binary_commands.items()\n },\n }", "def keywords(self):\n return self.__keywords", "def autocomplete(self, token):\n sub_dict = self.container\n for letter in token:\n try:\n sub_dict = sub_dict[letter]\n except KeyError:\n return []\n auto_list = []\n for word_fragment in self.traversal(start=sub_dict):\n if len(auto_list) < 4:\n auto_list.append(token + word_fragment)\n else:\n break\n return auto_list", "def lookup(root: dict, query: str, exact: bool = False) -> List[Set[int]]:\n if not query:\n return set()\n\n word_ids: List[Set[int]] = [] # ids of items that correspond to query\n for word in preprocess_words(query):\n node = root\n for c in word:\n node: Optional[dict] = node.get(c) # type: ignore\n if not node:\n # dead-end for this word\n word_ids.append(set())\n break\n else:\n word_ids.append(collect(node, exact))\n\n return word_ids", "def list(uid: int):\n\n return Token.list(uid)", "def extract_object_token(data, num_tokens, obj_list=[], verbose=True):\r\n token_counter = Counter()\r\n for img in data:\r\n for region in img['objects']:\r\n for name in region['names']:\r\n if not obj_list or name in obj_list:\r\n token_counter.update([name])\r\n tokens = set()\r\n # pick top N tokens\r\n token_counter_return = {}\r\n for token, count in token_counter.most_common():\r\n tokens.add(token)\r\n token_counter_return[token] = count\r\n if len(tokens) == num_tokens:\r\n break\r\n if verbose:\r\n print(('Keeping %d / %d objects'\r\n % (len(tokens), len(token_counter))))\r\n return tokens, token_counter_return", "def keywords_of_section(self, section, kwfilter):\n pcat = getToolByName(section, 'portal_catalog')\n cat = pcat._catalog\n path_idx = 
cat.indexes[self.path_index]\n tags_idx = cat.indexes[self.keyword_index]\n result = []\n # query all oids of path - low level\n pquery = {\n self.path_index: {\n 'query': '/'.join(section.getPhysicalPath()),\n 'depth': -1,\n }\n }\n kwfilter = safe_encode(kwfilter)\n # uses internal zcatalog specific details to quickly get the values.\n path_result, info = path_idx._apply_index(pquery)\n for tag in tags_idx.uniqueValues():\n if kwfilter and kwfilter not in safe_encode(tag):\n continue\n tquery = {self.keyword_index: tag}\n tags_result, info = tags_idx._apply_index(tquery)\n if intersection(path_result, tags_result):\n result.append(tag)\n # result should be sorted, because uniqueValues are.\n return safe_simplevocabulary_from_values(result)", "def list_term(term):\n\n members = ldapi.search(ld, cfg['ldap_users_base'],\n '(&(objectClass=member)(term=%s))', [ term ])\n return dict([(member[0], member[1]) for member in members])", "def test_extract_keywords_multiple_docs(keyphrase_length):\n top_n = 5\n keywords_list = model._extract_keywords_multiple_docs([doc_one, doc_two],\n top_n=top_n,\n keyphrase_ngram_range=keyphrase_length)\n assert isinstance(keywords_list, list)\n assert isinstance(keywords_list[0], list)\n assert len(keywords_list) == 2\n\n for keywords in keywords_list:\n assert len(keywords) == top_n\n\n for keyword in keywords:\n assert len(keyword[0].split(\" \")) <= keyphrase_length[1]", "def get_keywords(self, pattern=\"*\"):\n\n sql = \"\"\"SELECT collection.collection_id, collection.name,\n keyword.name, keyword.doc, keyword.args\n FROM collection_table as collection\n JOIN keyword_table as keyword\n WHERE collection.collection_id == keyword.collection_id\n AND keyword.name like ?\n ORDER by collection.name, keyword.name\n \"\"\"\n pattern = self._glob_to_sql(pattern)\n cursor = self._execute(sql, (pattern,))\n result = [(row[0], row[1], row[2], row[3], row[4])\n for row in cursor.fetchall()]\n return list(sorted(set(result), key=itemgetter(2)))", "def iterkeys(self):\n r = self.solr.select('%s:%s %s:*'\n % (self.index_uuid_field, self.index_uuid,\n self.d_uid_field))\n for doc in r.results:\n yield doc[self.d_uid_field]\n for _ in range(r.numFound // 10):\n r = r.next_batch()\n for doc in r.results:\n yield doc[self.d_uid_field]", "def get_all_headlines(self):\r\n\t\tlist_vals = list(self.keyword_headlines().values())\r\n\t\tuniq_headlines = set()\r\n\t\tfor list_val in list_vals:\r\n\t\t\tfor headlineobj in list_val:\r\n\t\t\t\tuniq_headlines.add(headlineobj.headlineid.content)\r\n\r\n\t\treturn list(uniq_headlines)", "def enrichKeywords(self, result):\n\n # TODO: Implement function\n pass", "def get_ids(self) -> List[str]:", "def generate_tokenlist(text):\r\n for paragraph in text.find_all('p'):\r\n paragraph_start = True\r\n for sentence in paragraph.find_all('s'):\r\n sentence_start = True\r\n for word in sentence.find_all(['wf', 'punc']):\r\n if word.name == 'punc':\r\n yield TextItem(None, word.string, word.name, word.string, paragraph_start, sentence_start, 0)\r\n paragraph_start = False\r\n sentence_start = False\r\n else:\r\n great_token = Token.from_tag(word)\r\n sense_key = great_token.sense_key if great_token.has_senses else None\r\n for token in great_token.get_components():\r\n yield TextItem('word', token.wordform, token.pos, token.lemma, paragraph_start, sentence_start, 1, sense_key)\r\n paragraph_start = False\r\n sentence_start = False", "def searchGlossary(self,keyword):\n\t\twords = []\n\n\t\tfor letter in glossary:\n\t\t\tfor word in 
glossary[letter]:\n\t\t\t\tprint word.keys()[0]\n\t\t\t\tif keyword.lower() in word.keys()[0].lower():\n\t\t\t\t\twords.append(word)\n\n\t\treturn words", "def getmetakeywords(allcontent, corpus):\n for i in range(0, len(allcontent)):\n words = re.split(\"[, ]+\", allcontent[i])\n if words[0] == \"Meta\":\n for j in range(3, len(words)):\n if len(processword(words[j])) > 0:\n corpus.append(processword(words[j]))", "def GetKeywords(self):\n if wx.VERSION >= (2, 9, 0, 0, ''):\n return [(0, R_KEYWORDS), (1, R_KEYWORDS2), (2, R_KEYWORDS3)]\n else:\n return [(1, KEYWORDS)]", "def words(self, fileids=None, categories=None):\n for sentence in self.sents(fileids, categories):\n for token in wordpunct_tokenize(sentence):\n yield token", "def recover_from_ids(self, ids, stop_id=None):\n terms = []\n for i in ids:\n terms += [self.get_term(i)]\n if stop_id is not None and i == stop_id:\n break\n return terms", "def instrument_keywords(instrument, caom=False):\n # Retrieve one dataset to get header keywords\n if not caom:\n filter_to_add = {'program': '01440'}\n else:\n filter_to_add = {'proposal_id': '01440'}\n sample = instrument_inventory(instrument, return_data=True, caom=caom,\n add_requests={'pagesize': 1, 'page': 1},\n add_filters=filter_to_add)\n data = [[i['name'], i['type']] for i in sample['fields']]\n keywords = pd.DataFrame(data, columns=('keyword', 'dtype'))\n\n return keywords", "def search_keys(dictionary, search_list=['help_text', 'label']):\n search_item1 = search_list[0]\n search_item2 = search_list[1]\n result = []\n flat_dict = flatten(dictionary)\n for k, v in flat_dict.items():\n if any(x in k for x in search_list):\n result.append( {k: v} )\n\n help_list = []\n for i in result:\n try:\n key = list(i.keys())[0]\n if key and key.endswith(search_item1):\n corresponding_label_key = '.'.join(key.split('.')[:-1]) + '.' + search_item2\n for j in result:\n key_label = list(j.keys())[0]\n if key_label and key_label.endswith(search_item2) and key_label == corresponding_label_key: # and result.has_key(key):\n #import ipdb; ipdb.set_trace()\n help_list.append({search_item2: j[key_label], search_item1: i[key]})\n except Exception as e:\n #import ipdb; ipdb.set_trace()\n print(e)\n\n return help_list" ]
[ "0.59176874", "0.58475524", "0.56898886", "0.56249386", "0.5622464", "0.55950075", "0.5594749", "0.5539701", "0.55360544", "0.54705125", "0.5462367", "0.5412044", "0.53875583", "0.53866786", "0.5362733", "0.52585196", "0.524303", "0.5239646", "0.5229351", "0.517999", "0.51752526", "0.51630795", "0.51602095", "0.5130897", "0.51274234", "0.51170385", "0.51102227", "0.5072791", "0.5068547", "0.50628084", "0.50522375", "0.5040271", "0.5036046", "0.5031082", "0.5029459", "0.5023608", "0.50229937", "0.5015819", "0.4997928", "0.49945995", "0.49776888", "0.4970345", "0.49699345", "0.49692178", "0.49684963", "0.49615666", "0.4954377", "0.49539563", "0.4953119", "0.49432996", "0.4942145", "0.49410653", "0.4934593", "0.49194932", "0.4917208", "0.4916421", "0.49160036", "0.491365", "0.4912218", "0.4908467", "0.48992822", "0.48984668", "0.48883396", "0.48863634", "0.4881915", "0.4878416", "0.4875258", "0.48737574", "0.48702967", "0.48673633", "0.4864006", "0.4863771", "0.4863771", "0.4863771", "0.4861556", "0.4853869", "0.4850623", "0.48503622", "0.48503622", "0.48468244", "0.48423705", "0.4826899", "0.4825882", "0.48231047", "0.48219728", "0.48145476", "0.48094264", "0.48089486", "0.48052502", "0.47937945", "0.4792233", "0.47920367", "0.47917402", "0.47904265", "0.47870168", "0.4784057", "0.4781775", "0.47745594", "0.4773458", "0.47678867", "0.475789" ]
0.0
-1
Extract list of tags from KeyUsage extension.
def extract_key_usage(self, ext): res = [] fields = KU_FIELDS[:] # "error-on-access", real funny if not ext.key_agreement: fields.remove('encipher_only') fields.remove('decipher_only') for k in fields: val = getattr(ext, k, False) if val: res.append(k) return res
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def extract_xkey_usage(self, ext):\n oidmap = {v: k for k, v in XKU_CODE_TO_OID.items()}\n res = []\n for oid in ext:\n if oid in oidmap:\n res.append(oidmap[oid])\n else:\n raise InvalidCertificate(\"Unsupported ExtendedKeyUsage oid: %s\" % (oid,))\n return res", "def ext_key_usages(self) -> pulumi.Output[Optional[Sequence[str]]]:\n return pulumi.get(self, \"ext_key_usages\")", "def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ExtensionAssociationTagArgs']]]]:\n return pulumi.get(self, \"tags\")", "def _ProcessTagsForFileUse(self) -> List[str]:\n return list(self.tags)", "def getTags(number=None):", "def get_tags(self) -> Dict:\n return self.orthanc.get_instance_tags(self.identifier)", "def list_tags(self, entry_name):\n return self.__datacatalog.list_tags(parent=entry_name)", "def ext_key_usages(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"ext_key_usages\")", "def ext_key_usages(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"ext_key_usages\")", "def tags():", "def _tags(self):\n retval = []\n for of in self.tagnames:\n retval.append([of, self.get_datatype(of), self.get(of)])\n return retval", "def tags(self):\n return tuple([x.strip() for x in self._dict.get('tags').split(',')])", "def extended_key_usage(self):\n\n if self._extended_key_usage is None:\n return set()\n\n return set(self._extended_key_usage.native)", "def tags(self) -> pulumi.Output[Optional[Sequence['outputs.ExtensionAssociationTag']]]:\n return pulumi.get(self, \"tags\")", "def tags(self) -> list[str]:\n _args: list[Arg] = []\n _ctx = self._select(\"tags\", _args)\n return _ctx.execute_sync(list[str])", "def tags(self):\n return self.get(\"tags\")", "def tags(self, request, tag_list, group):\n return tag_list", "def getTagList(self):\n if not self.proxy:\n self.proxy = self.session.service(\"ALBehaviorManager\")\n return self.proxy.getTagList()", "def tags(self) -> Sequence[str]:\n return pulumi.get(self, \"tags\")", "def tags(self):\n return self._item.get(\"tags\")", "def get_tags(self, *args, **kwargs):\n \n tags_data = api.get_tags(\n *args,\n api_key=self.__creds.api_key_v2,\n **kwargs)\n return [en.Tag(tag_data) for tag_data in tags_data]", "def get_tags(instance_id=None, keyid=None, key=None, profile=None, region=None):\n tags = []\n client = _get_conn(key=key, keyid=keyid, profile=profile, region=region)\n result = client.get_all_tags(filters={\"resource-id\": instance_id})\n if result:\n for tag in result:\n tags.append({tag.name: tag.value})\n else:\n log.info(\"No tags found for instance_id %s\", instance_id)\n return tags", "def tagkeys(self,\r\n tag):\r\n\r\n return sorted(list(self.get_keys_for_tag(tag)))", "def tags(self, uuid):\n return self._backend.tags(uuid)", "def get_all_tagged(self,tag_name):\n return self.tag2elements[tag_name]", "def listTags(self, authenticationToken):\r\n pass", "def tag_strings(self):\n return [tag.tag_text for tag in self.tags.all()]", "def list_all_tags(self,obs):", "def get_simplified_tags(self) -> Dict:\n return self.orthanc.get_instance_simplified_tags(self.identifier)", "def tags(self):\n tag_docs = self.tag_data\n tags = set([x[\"tag\"] for x in tag_docs])\n # remove the \"thawed\" tag\n tags.discard(\"thawed\")\n return tags", "def get_tags(self,element):\n if element in self.element2tags.keys():\n return self.element2tags[element]\n return []", "def parse_tags_for_video (self, video):\n tags = []\n for tag_key in dict(video['tags']).keys():\n if 
self._is_size_key(key=tag_key) == False and tag_key != 'summary':\n tags.append(video['tags'][tag_key]['name'])\n return tags", "def tags(cls) -> Sequence[int]:\n # pylint: disable=no-member\n return [tag for _, tag, _ in cls._items_]", "def _get_tags(self) -> Generator[str, None, None]:\n for decorator in self._find_decorators(TagsDecorator):\n for tag in decorator.tags:\n if isinstance(tag, TagData):\n self._extension.add_tag_data(tag)\n yield str(tag)", "def digAllTags(ref, key_tag):\r\n G_ore = ref.findall(key_tag)\r\n for i in range(len(ref)):\r\n if(len(ref[i]) > 0):\r\n G_ore.extend(digAllTags(ref[i], key_tag))\r\n return G_ore", "def getNoteTagNames(self, authenticationToken, guid):\r\n pass", "def getTagList(tags):\n tags = tags[1:len(tags)-1]\n return tags.split('><')", "def tags(self):\n tagexp = re.compile(r\"\\[([^\\]]*)\\]\")\n subject = self['Subject']\n return tagexp.findall(subject)", "def _list_tags(self, expression):\n try:\n for tag in self.dockerioapi.get_tags(expression):\n Msg().out(tag)\n return self.STATUS_OK\n except (KeyError, TypeError, ValueError):\n return self.STATUS_ERROR", "def list_tags(filename):\n storeapps = APP.config[\"storage\"]\n filename = filename.encode(\"utf-8\")\n\n try:\n application = list(nativeapps.io.ls(storeapps, r\".*\" + filename + \"$\"))[0]\n meta_path = os.path.join(os.path.dirname(application), \"metadata.json\")\n metadata = json.loads(nativeapps.io.readfile(meta_path))\n tags = metadata.get(\"tags\", [])\n return flask.jsonify(tags)\n except IndexError:\n return \"Unknown application: %s\" % (application), 404", "def key_usages(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"key_usages\")", "def tags(self) -> Mapping[str, Any]:\n return pulumi.get(self, \"tags\")", "def tags(self) -> Mapping[str, Any]:\n return pulumi.get(self, \"tags\")", "def tags(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"tags\")", "def extra_tags(self) -> str:\n all_tags = dict.fromkeys(\n self.default_tags.split() + self.custom_tags.strip().split()\n )\n return \" \".join(all_tags.keys())", "def _get_tags(fname):\n tarsqi_inst, tarsqidoc = tarsqi.load_ttk_document(fname)\n return tarsqidoc.tags", "def tags(self):\r\n url = '{0}/tags/'.format(self.get_url())\r\n request = http.Request('GET', url)\r\n\r\n return request, parsers.parse_json", "def tags(self):\n return self.__tags[:]", "def tag_names(self):\r\n return [tag.name for tag in self.get_tags()]", "def tags(self) -> Mapping[str, str]:\n return pulumi.get(self, \"tags\")", "def tags(self) -> Mapping[str, str]:\n return pulumi.get(self, \"tags\")", "def tags(self) -> Mapping[str, str]:\n return pulumi.get(self, \"tags\")", "def tags(self) -> Mapping[str, str]:\n return pulumi.get(self, \"tags\")", "def tags(self):\r\n url = self.base_url + 'tags/'\r\n return json.loads(self.bb.load_url(url))", "def get_tags(self):\n resp = self.get(_u.build_uri(\"tags\", domain=self.domain))\n return utils.handle_response(resp)", "def tags(self) -> Sequence[str]:\r\n return self._tags", "def tag_list(context, addon, dev_tags=None, user_tags=None):\n if not dev_tags and not user_tags:\n return ''\n if not dev_tags:\n dev_tags = []\n if not user_tags:\n user_tags = []\n\n c = {\n 'request': context['request'],\n 'addon': addon,\n 'dev_tags': dev_tags,\n 'user_tags': user_tags,\n }\n t = env.get_template('tags/tag_list.html').render(**c)\n return jinja2.Markup(t)", "def registered_tags(self):\r\n return self._mapping.keys()", "def tags(self):\r\n url = 
'{0}/{1}'.format(self.get_url(), 'tags')\r\n\r\n return http.Request('GET', url), parsers.parse_json", "def get_tags(self):\n return self.tags", "def tags(self) -> List[str]:\n return self._db_data.tags", "def get_supported_extensions(ext=\".as\"):\n result = list(ext + x for x in LOADERS.keys())\n result.append(ext)\n return result", "def get_tags(self):\n tags = self.AWS_TAGS\n\n label_selector = self.label_selector.split('=')\n label_tag = {'Key': label_selector[0], 'Value': label_selector[1]}\n tags.append(label_tag)\n\n annotation_tag = {'Key': self.expire_annotation, 'Value': str(int(self.now + self.DAY_AND_NIGHT))}\n tags.append(annotation_tag)\n\n return tags", "def tags(self):\n return self._tags", "def tags(self):\n return self._tags", "def tags(self):\n return self._tags", "def tags(self):\n return self._tags", "def tags(self):\n return self._tags", "def tags(self):\n return self._tags", "def tags(self):\n return self._tags", "def tags(self):\n return self._tags", "def tags(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"tags\")", "def get_tag_index(self) -> List[str]:\n path = os.path.join(self.directory_path, \"__tags.json\")\n if not os.path.exists(path):\n return list()\n try:\n with open(path) as f:\n return json.load(f)\n except json.decoder.JSONDecodeError:\n print(f\"Could not get tag index. Check file: {path}\")", "def get_tags(self) -> Set[Text]:\r\n return {tag for tag in self.tags}", "def _model_tags(kwargs, key):\r\n if key not in kwargs:\r\n return []\r\n\r\n instance = kwargs[key]\r\n tags = [\r\n u'{}.{}:{}'.format(key, attr, getattr(instance, attr))\r\n for attr in getattr(instance, 'MODEL_TAGS', [])\r\n ]\r\n tags.append(u'model_class:{}'.format(instance.__class__.__name__))\r\n return tags", "def tags():\n tag = \"\"\n tag += \"Supported inflexions and appropriate keys\\n\\n\"\n for item in vkeys.keys():\n tag += (\"%s\\t - %s\\n\" %(item.ljust(10,' '), vkeys[item]))\n return tag", "def getTags(self,):\n\t\treturn self.tags;", "def list_tags(self, session):\n result = self._tag(session.get, session=session)\n return result['tags']", "def get_tags(self):\r\n\r\n\r\n\r\n #using database\r\n\r\n if self.using_database:\r\n aprint('GET TAGS')\r\n value_tuple = (notebookname,)\r\n db_cursor.execute(\"SELECT tag\"\r\n +\" FROM tags_to_keys\"\r\n +\" WHERE notebook=?;\",\r\n value_tuple)\r\n fetched = db_cursor.fetchall()\r\n if fetched:\r\n return {tag[0] for tag in fetched}\r\n\r\n return set()\r\n\r\n #using shelf\r\n\r\n return self.tag_dict.keys()", "async def tags(self, ctx, member: discord.Member = None):\n member = member or ctx.author\n guild_tags = self._tag_dict.get(ctx.guild.id)\n if not guild_tags:\n raise commands.BadArgument(f'This guild does not have any tags!')\n tags = guild_tags.items()\n tags = sorted(tags, key=lambda x: x[1]['uses'], reverse=True)\n data = [f'{tag[0]} - {tag[1][\"uses\"]} uses' for tag in tags if tag[1]['author'] == member.id] # only add to list comp if belongs to author instead of removing from dict items in above lines\n embed = discord.Embed(colour=self.bot.colour)\n embed.set_author(name=f\"All of {ctx.author}'s Tags in {ctx.guild}\", icon_url=ctx.author.avatar_url)\n source = IndexedListSource(data=data, embed=embed, title=\"Tags\")\n await CatchAllMenu(source=source).start(ctx)", "def get_tags(self):\n return self.get_url_data(self.api_url + 'refs/tags')", "def get_tags(self):\n tags = []\n for image in self.client.images.list():\n for tag in image.tags:\n if tag.startswith(self.repository_name):\n tokens = 
tag.split(':')\n tags.append(tokens[1])\n return tags", "def tags(self) -> Optional[Any]:\n return pulumi.get(self, \"tags\")", "def simplified_tags(self) -> Dict:\n return dict(self.client.get_instances_id_tags(self.id_, params={'simplify': True}))", "def list(self):\n return self._post(\n request='list',\n uri=ApiUri.TAGS.value,\n ).get('tags')", "def dataset_tags(connection):\n assert connection\n query = \"\"\"select * from tags()\"\"\"\n result = sqlio.read_sql_query(query, connection)\n return [item.strip() for item in result['name']], [tag_id.strip() for tag_id in result['tag_id']]", "def tags(self) -> Dict:\n return dict(self.client.get_instances_id_tags(self.id_))", "def tags(self) -> pulumi.Output[Optional[Sequence[str]]]:\n return pulumi.get(self, \"tags\")", "def tags(self) -> pulumi.Output[Optional[Sequence[str]]]:\n return pulumi.get(self, \"tags\")", "def tags(self) -> pulumi.Output[Optional[Sequence[str]]]:\n return pulumi.get(self, \"tags\")", "def first_level_tags(self) -> Any:\n return self.client.get_instances_id_content_tags_path(self.id_, '')", "def ballot_get_tag_values_by_key(key):\r\n return make_request({\"method\": \"ballot_get_tag_values_by_key\",\r\n \"params\": [key],\r\n \"jsonrpc\": \"2.0\",\r\n \"id\": 0, })", "def tags(self) -> dict:\n return self._tags", "def extensions(self) -> Tuple[str, ...]:\n raise NotImplementedError", "def get_tags_list(*args, **kwargs):\n return Tag.objects.active()", "def get_tags_list(*args, **kwargs):\n return Tag.objects.active()", "def metadataGeoTags(tif: TiffFile):\n geoTag: TiffTag = tif.pages[0].tags.get('GeoKeyDirectoryTag')\n if geoTag is not None:\n g: TiffTag = tif.pages[0].tags.get(34737)\n g2: TiffTag = tif.pages[0].tags.get(34736)\n g3: TiffTag = tif.pages[0].tags.get(33922)\n g4: TiffTag = tif.pages[0].tags.get(33550)\n\n tags = [(geoTag.code, 'H', geoTag.count, geoTag.value),\n (g.code, 's', g.count, g.value),\n (g2.code, 'd', g2.count, g2.value),\n (g3.code, 'd', g3.count, g3.value),\n (g4.code, 'd', g4.count, g4.value)]\n return tags\n else:\n print('no geo tags in file')", "def getTags(self, data_type=\"AOD\", filter_full=False, filter_fast=False):\n if not self.hasDataType(data_type):\n logging.warning(\"Unkown data format %s for sample %s (%d)\" % (data_type, self.name(), self.dsid()))\n return []\n List = []\n for key in self.tags(data_type):\n if not filter_full and key.find(\"_s\") != -1: List.append(key)\n elif not filter_fast and key.find(\"_a\") != -1: List.append(key)\n elif not filter_full and not filter_fast: List.append(key)\n elif key.find(\"_s\") == -1 and key.find(\"_a\") == -1: List.append(key)\n return List", "def listTagsByNotebook(self, authenticationToken, notebookGuid):\r\n pass", "def tags(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:\n return pulumi.get(self, \"tags\")" ]
[ "0.6921014", "0.6244223", "0.60213065", "0.594296", "0.59360904", "0.59154725", "0.59044653", "0.5891674", "0.5891674", "0.58631593", "0.5841936", "0.57997465", "0.5775625", "0.5767615", "0.57545197", "0.5743227", "0.5717393", "0.5711848", "0.5693686", "0.5669996", "0.5638222", "0.56306446", "0.5619641", "0.5619368", "0.5616068", "0.56079584", "0.56035024", "0.5603022", "0.5590342", "0.55834717", "0.55807567", "0.55565506", "0.5555874", "0.5545934", "0.5531701", "0.552772", "0.5519967", "0.5516518", "0.55136824", "0.54838324", "0.5479048", "0.5478064", "0.5478064", "0.54760814", "0.5472044", "0.54696965", "0.5467743", "0.54509944", "0.5442975", "0.5429615", "0.5429615", "0.5429615", "0.5429615", "0.54222214", "0.541944", "0.54061586", "0.5404623", "0.5398096", "0.5387695", "0.5385952", "0.5366941", "0.53639257", "0.5359855", "0.53435856", "0.53435856", "0.53435856", "0.53435856", "0.53435856", "0.53435856", "0.53435856", "0.53435856", "0.534349", "0.5342483", "0.53372586", "0.5331637", "0.5330571", "0.5323401", "0.5323328", "0.53115344", "0.5308344", "0.529461", "0.5284744", "0.5279013", "0.5278024", "0.5267248", "0.52669084", "0.5266743", "0.5264148", "0.5264148", "0.5264148", "0.52404", "0.52399206", "0.52396137", "0.52287143", "0.52276945", "0.52276945", "0.5220667", "0.52144206", "0.5214183", "0.5213955" ]
0.6967917
0
Convert Name object to shortcut dict.
def extract_name(self, name): name_oid2code_map = {v: k for k, v in DN_CODE_TO_OID.items()} res = [] for att in name: if att.oid not in name_oid2code_map: raise InvalidCertificate("Unsupported RDN: %s" % (att,)) desc = name_oid2code_map[att.oid] val = as_unicode(att.value) res.append((desc, val)) return res
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _to_dict(self, remove_name=True):\n keys = [\"name\", \"path\", \"type\", \"mode\", \"description\"]\n if remove_name:\n keys.remove(\"name\")\n result = {key: getattr(self, key) for key in keys}\n return _remove_empty_values(result)", "def process_shortcut(s):\n if s.count('[') != s.count(']'):\n raise MismatchedGrouping('Invalid grouping of brackets, %s' % s)\n\n if s.count('\"') % 2 != 0 or s.count(\"'\") % 2 != 0:\n raise MismatchedGrouping('Quotation groupings are mismatched, %s' % s)\n\n ret_dict = {}\n\n # find the classes and id\n for match in rgx_class.findall(s):\n if match.startswith('#'):\n ret_dict.setdefault('id', match.strip('#'))\n\n elif match.startswith('.'):\n classes = ret_dict.setdefault('_classes', [])\n classes.append(match.strip('.'))\n\n # find all of our named attributes\n for key, value in rgx_n_attr.findall(s):\n ret_dict.setdefault(key, value)\n\n ret_dict['class'] = ret_dict.pop('_classes', [])\n\n return ret_dict", "def dumps(self):\n res = {}\n for k, v in ALIASES.items():\n res[k] = getattr(self, v)\n return res", "def _to_dict(self, remove_name=True):\n keys = [\"name\", \"path\", \"type\", \"mode\", \"description\", \"default\", \"min\", \"max\", \"enum\", \"optional\"]\n if remove_name:\n keys.remove(\"name\")\n result = {key: getattr(self, key) for key in keys}\n return _remove_empty_values(result)", "def _disk_dict_pattern(self, name, datastore, new_name=None):\r\n\r\n if not new_name:\r\n new_name = name\r\n\r\n temp_disk_dict = {}\r\n temp_disk_dict[self.disk_pattern.name.value] = name\r\n temp_disk_dict[self.disk_pattern.datastore.value] = datastore\r\n temp_disk_dict[self.disk_pattern.new_name.value] = new_name\r\n return temp_disk_dict", "def _key(self) -> Mapping[str, str]:\n return {self.api: self.name}", "def reverse_aliases():\n result = {}\n aliases = construct_aliases()\n for key in aliases:\n cat, idx = key.split(':')\n prp = ':'.join(aliases[key].split(':')[1:])\n # TODO TODO\n result[cat + '.' + prp] = cat + ':' + idx\n return result", "def _kwargs(self):\n dict = {\"name\":self.name}\n return dict", "def _rename_aliases(input: dict[str, Any]) -> dict[str, Any]:\n return {_UI_FIELD_ALIASES.get(k, k): v for k, v in input.items()}", "def setup_command_dict(self):\n\n out = {}\n for k, v in self.command_aliases.items():\n for i in v:\n out[i] = k # string typed by player:function of MyThing\n return out", "def __to_key(name: str) -> str:\n return name.replace(\" \", \"-\")", "def dottify(self, base_name):\n obj_dict = vars(self)\n dotted_dict = {}\n for k, v in obj_dict.items():\n if v is not None:\n dotted_dict[base_name + '.' 
+ k] = v\n return dotted_dict", "def make_alias_dict(file_name):\n\n # Dict mapping between words->target words\n dict_mapping = {}\n # List of the target words\n words_target = []\n for line in open(file_name, 'r'):\n alias = line.strip('\\n').strip('\\r').split(',')\n alias_target = alias[0] if alias[0] not in dict_mapping else dict_mapping[alias[0]]\n for a in alias:\n dict_mapping[a] = alias_target # use the first term as the aliasing target\n words_target.append(alias_target)\n return dict_mapping, words_target", "def _set_attr_name_map(self):\n self.attr_name_map = {}\n for object_query in self.query:\n object_name = object_query[\"object_name\"]\n object_class = self.object_map[object_name]\n aliases = AttributeInfo.gather_aliases(object_class)\n self.attr_name_map[object_class] = {}\n for key, value in aliases.items():\n filter_by = None\n if isinstance(value, dict):\n filter_name = value.get(\"filter_by\", None)\n if filter_name is not None:\n filter_by = getattr(object_class, filter_name, None)\n value = value[\"display_name\"]\n if value:\n self.attr_name_map[object_class][value.lower()] = (key.lower(),\n filter_by)\n custom_attrs = AttributeInfo.get_custom_attr_definitions(\n object_class)\n for key, definition in custom_attrs.items():\n if not key.startswith(\"__custom__:\") or \\\n \"display_name\" not in definition:\n continue\n try:\n # Global custom attribute definition can only have a single id on\n # their name, so it is safe for that. Currently the filters do not\n # work with object level custom attributes.\n attr_id = definition[\"definition_ids\"][0]\n except KeyError:\n continue\n filter_by = CustomAttributeValue.mk_filter_by_custom(object_class,\n attr_id)\n name = definition[\"display_name\"].lower()\n self.attr_name_map[object_class][name] = (name, filter_by)", "def __save_url_mapping(instance):\n short_to_url = Url.__load_url_mapping()\n short_to_url[instance.short_url] = instance\n pickle.dump(short_to_url, open(\"short_to_url.p\", \"wb\"))", "def rename(self,name):\n for key in OBJECTS_MASTER_DICT:\n if OBJECTS_MASTER_DICT[key] == self.alias:\n OBJECTS_MASTER_DICT[key] = name\n self.alias = name", "def shortcut_lookup(name):\n if (name not in shortcuts.keys()):\n return None\n\n qname = shortcuts[name]\n ind = lookup(qname, __check_shortcuts=False)\n\n return ind", "def renamed_dict(event):\n\n new_dict = thaw(event.data())\n\n for old, new in list(rename_map.items()):\n new_dict[new] = new_dict.pop(old)\n\n return new_dict", "def insertable_dict(self):\n # .strip('_') is for type_\n return {\n 'f_' +\n p.key.strip('_'): getattr(\n self,\n p.key) for p in self.__mapper__.attrs}", "def normalise_bookmarks(self, data):\n return {\n k: v.__dict__ for k, v in data.items()\n }", "def kwarg_to_str_dict(self) -> dict:\n kwarg_dict = {}\n for key, val in {**self.kwargs}.items():\n if hasattr(val, \"_ref\"):\n kwarg_dict[key] = val.ref\n else:\n kwarg_dict[key] = val\n return kwarg_dict", "def nameToAddress(self, name):\n pass", "def to_dict(self):\n return {name: getattr(self, name)\n for name in dir(self) if name.isupper()}", "def dictOfVariables(self):\n return {self.name: self}", "def convert_object_to_dictionary(obj):\n # type: (Any) -> Dict[str, str]\n # hydra ConfigStore special case:\n if obj.__class__.__module__.startswith(\"hydra.core\") and hasattr(obj, \"repo\"):\n return obj.repo\n\n dic = {}\n for attr in dir(obj):\n if attr.startswith(\"__\") or attr.startswith(\"to_\"):\n continue\n value = getattr(obj, attr)\n if callable(value):\n continue\n try:\n 
dic[attr] = str(value)\n except Exception:\n pass\n return dic", "def labels_to_slugs(self):\n return {\n column_attrs[LABEL]: reserve_encoded(column_name) for\n (column_name, column_attrs) in self.items()\n }", "def CopyToDict(self):\n return {'labels': self.labels}", "def get_name_dict(self, path, ch_name_dict):\n if path in self.saved_dicts:\n (id_dict, name_dict) = self.saved_dicts[path]\n else:\n (id_dict, name_dict) = self.construct_dicts(path, ch_name_dict)\n self.saved_dicts[path] = (id_dict, name_dict)\n\n return name_dict", "def original2target(self) -> Dict[str, str]:\n return {\n self.keywords[i]: self.target_words[i]\n for i in range(len(self.keywords))\n }", "def __getitem__(self, name):\n ikEl = self.infoKinds.get(name, None)\n if ikEl:\n return ikEl.toDict(self)\n return None", "def transform_name_mapping(self) -> pulumi.Output[Mapping[str, str]]:\n return pulumi.get(self, \"transform_name_mapping\")", "def _namespace_to_dict_util(n):\n if not isinstance(n, SimpleNamespace):\n return n\n\n ret = {}\n for k, v in vars(n).items():\n ret[k] = _namespace_to_dict_util(v)\n\n return ret", "def dictize(self):\n dict = {}\n for node in self.sort():\n logger.debug(\"Dictize: id %s has name %s\" % (node._id, node.name))\n x = node._kwargs()\n dict[node._id]={\"klass\":node.__class__.__name__, \n \"kwargs\": x,\n \"children\":[child._id for child in node.children()]}\n return dict", "def __call__(self, _shortcut=None, **attrs):\n\n def fix_key(k):\n return k.strip('_').replace('_', '-')\n\n def check_val(v, ttype='class'):\n v = str(v)\n\n if not v:\n return v\n\n invalid_chars = ' .,'\n\n for c in invalid_chars:\n if c in v:\n raise InvalidAttribute('\"%s\" is an invalid `%s` value' % (v, ttype))\n\n return v\n\n attrs = {fix_key(k): v for k, v in attrs.items()}\n\n if _shortcut and isinstance(_shortcut, (str, unicode)):\n processed = process_shortcut(_shortcut)\n\n else:\n processed = {}\n\n # make a copy of the incoming attributes\n attr_clone = dict(attrs)\n\n # combine the new and the _shortcut class lists\n cur_classes = attrs.get('class', '').split(' ')\n all_classes = processed.get('class', []) + cur_classes\n\n # merge the _shortcut attributes and those supplied to the method\n attr_clone.update(processed)\n\n # re-set the classes on the returned object\n attr_clone['class'] = ' '.join(map(check_val, filter(None, all_classes)))\n\n id_val = attr_clone.get('id', None)\n if id_val:\n attr_clone['id'] = check_val(str(id_val))\n\n for k, v in list(attr_clone.items()):\n if not v:\n attr_clone.pop(k, None)\n\n return Node(self.tag, attr_clone)", "def _create_symbol_mapping():\n normal_items = [\"+\", \"-\"]\n unicode_items = [chr(0x2070 + i) for i in range(10, 12)]\n\n # Create a dict mapping the two.\n return DefaultDictionary(zip(normal_items, unicode_items))", "def get_key (self, name):\n return self + name", "def derive_url_dict(self, url_obj):\n udict = dict(url_obj.__dict__)\n udict.pop(\"_sa_instance_state\")\n return udict", "def key(self, name):\n return name", "def __init__(self, name=None, hidden_prefix=None):\n\n self.attrs = {}\n\n if hidden_prefix is None:\n self.hidden_prefix = \"__\"\n else:\n self.hidden_prefix = hidden_prefix\n\n # All variables (excluding groups) will be saved into this dictionary:\n self._vars = {}\n\n # All groups will be saved here.\n self._groups = {}\n\n if name is None:\n self.name = \"{} {}\".format(id(self), type(self), )\n else:\n self.name = name\n\n self._link_from_main = None", "def nameYamlMapping(name: str, mapping: Dict[str, Any]) 
-> Dict[str, Any]:\n # Because we want \"name\" field to be the first member for aesthetic\n # reasons, we create a new dict and copy ``mapping`` to it.\n newMapping = {\"name\": name}\n newMapping.update((key, value) for key, value in mapping.items() if key != \"name\")\n return newMapping", "def upstream_typed_keys(self, instance_name):\n keys = self.upstream_keys(instance_name)\n d = {}\n for k in keys:\n d[k] = self.node_map[k]\n return d", "def __load_url_mapping():\n try:\n return pickle.load(open(\"short_to_url.p\", \"rb\"))\n except IOError:\n return {}", "def target2original(self) -> Dict[str, str]:\n return {\n self.target_words[i]: self.keywords[i] \n for i in range(len(self.keywords))\n }", "def _shortcut(input, residual, name):\n # Expand channels of shortcut to match residual.\n # Stride appropriately to match residual (width, height)\n # Should be int if network architecture is correctly configured.\n input_shape = K.int_shape(input)\n residual_shape = K.int_shape(residual)\n stride_width = int(round(input_shape[ROW_AXIS] / residual_shape[ROW_AXIS]))\n stride_height = int(round(input_shape[COL_AXIS] / residual_shape[COL_AXIS]))\n equal_channels = input_shape[CHANNEL_AXIS] == residual_shape[CHANNEL_AXIS]\n\n shortcut = input\n # 1 X 1 conv if shape is different. Else identity.\n if stride_width > 1 or stride_height > 1 or not equal_channels:\n shortcut = Conv2D(filters=residual_shape[CHANNEL_AXIS],\n kernel_size=(1, 1),\n strides=(stride_width, stride_height),\n padding=\"valid\",\n kernel_initializer=\"he_normal\",\n kernel_regularizer=l2(0.0001),\n\t\t name = name)(input)\n\n return add([shortcut, residual])", "def default(self, obj):\n return {'__{}__'.format(obj.__class__.__name__): obj.__dict__}", "def as_kwargs(self) -> Dict[str, Any]:\n ret = {}\n for arg in self.args.values():\n ret[arg.name] = arg.value\n return ret", "def mapping(self) -> Dict[str, str]:\n return self._normalizer.get_placeholders()", "def __getstate__(self) -> Dict[str, Any]:\n return {\"name\": self.name}", "def ref(name):\n return { 'name': name } if name else None", "def make_unpack_map(node):\n return dict(zip(node.names, node.iternodes()))", "def canonical_variables(self):\n if not hasattr(self, 'bound_symbols'):\n return {}\n dums = numbered_symbols('_')\n reps = {}\n # watch out for free symbol that are not in bound symbols;\n # those that are in bound symbols are about to get changed\n bound = self.bound_symbols\n names = {i.name for i in self.free_symbols - set(bound)}\n for b in bound:\n d = next(dums)\n if b.is_Symbol:\n while d.name in names:\n d = next(dums)\n reps[b] = d\n return reps", "def conversion_context(self):\n d = {}\n wanted = ['a', 'h']\n for x in wanted:\n if x in self.properties:\n d[x] = self.properties[x]\n return d", "def __init__(self, language=config[\"default_language\"],\n lowercasing=config[\"lowercasing\"],\n path=None, resource=\"names\"):\n super(NameDictionary, self).__init__(language=language,\n lowercasing=lowercasing,\n path=path, resource=resource)", "def to_dictionary(self):\n new_dictionary = {}\n for key, value in self.__dict__.items():\n new_dictionary[key.split(\"__\")[-1]] = value\n new_dictionary['size'] = new_dictionary['width']\n del new_dictionary['width']\n del new_dictionary['height']\n return new_dictionary", "def namedtuples2dicts(namedtuples):\n return {k: dict(v._asdict()) for k, v in namedtuples.items()}", "def as_kwargs(self) -> Dict[str, Parameter]:\n dict_out = {}\n\n # make all parameter paths absolute\n try:\n absolute_parameters = 
self.make_unc_paths(self.parameters)\n except NoAbsoluteRootPathException as e:\n raise ParameterMappingError(e) from e\n\n for parameter in absolute_parameters:\n if self.is_non_keyword(parameter):\n # This parameter should not be included in kwargs. Skip\n continue\n elif self.is_source_identifier(parameter):\n if self.is_pacs_type(parameter):\n dict_out[\"source_instance_id\"] = get_legacy_idis_value(\n parameter.value\n )\n elif self.is_path_type(parameter):\n dict_out[\"source_path\"] = str(parameter.value)\n else:\n raise ParameterMappingError(\n f\"Unknown source parameter '{parameter}'\"\n )\n else:\n try:\n dict_out[self.PARAMETER_KEYWORDS[type(parameter)]] = str(\n parameter.value\n )\n except KeyError as e:\n raise ParameterMappingError(\n f\"Unknown parameter '{parameter}'\"\n ) from e\n\n return dict_out", "def nodes_names_map(self):\n return {nd.name: nd for nd in self.nodes}", "def build_shell_dict(self):\n shell_dict = {'id': self.json_file['name'].lower().split(' ')[0],\n 'name': Template('GATK4 AUTO $name').substitute(self.json_file),\n 'short_name': self.json_file['name'].split(' ')[0],\n 'profile': self.profile,\n 'description': self.json_file['summary'].rstrip(' '),\n 'summary': pypandoc.convert_text(self.json_file['description'], 'rst', format='html')}\n return shell_dict", "def to_dict(self):\n return {\n \"name\": self.name,\n \"path_destination\": self.path_destination,\n \"path_source\": self.path_source,\n \"text\": self.text,\n \"create_link\": self.create_link,\n \"sudo\": self.sudo,\n \"comments\": self.comments,\n }", "def _prefixed(nt: namedtuple, prefix):\n result = {}\n for key, value in nt._asdict().items():\n result[prefix + key] = value\n return result", "def convert_abbrev(word):\r\n return abbreviations[word.lower()] if word.lower() in abbreviations.keys() else word", "def transform_name_mapping(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:\n return pulumi.get(self, \"transform_name_mapping\")", "def test_dotwiz_plus_to_attr_dict():\n dw = DotWizPlus(hello=[{\"Key\": \"value\", \"Another-KEY\": {\"a\": \"b\"}}],\n camelCased={r\"th@#$%is.is.!@#$%^&*()a{}\\:<?>/~`.T'e'\\\"st\": True})\n\n assert dw.to_attr_dict() == {\n 'hello': [\n {\n 'another_key': {'a': 'b'},\n 'key': 'value',\n }\n ],\n 'camel_cased': {'th_is_is_a_t_e_st': True},\n }", "def make_keydict(self, analyte=None):\n if analyte is None:\n analyte = self.analytes\n elif isinstance(analyte, str):\n analyte = [analyte]\n\n out = {}\n for a in analyte:\n key = []\n for f in self.components.keys():\n if self.switches[a][f]:\n key.append(f)\n out[a] = ' & '.join(sorted(key))\n self.keydict = out\n return out", "def create_name_mappings(\n config: Dict[str, Dict[str, Union[str, List]]], map_full_to_short: bool = True\n) -> Dict:\n\n csv_to_excel = {}\n for name, params in config.items():\n try:\n csv_to_excel[name] = params[\"short_name\"]\n except KeyError:\n if len(name) > 31:\n logger.info(f\"{name} does not have a 'short_name'\")\n continue\n\n if map_full_to_short:\n return csv_to_excel\n else:\n return {v: k for k, v in csv_to_excel.items()}", "def get_dict_of_str2(self):\n pass", "def dictOfRandomVariables(self):\n return {self.name: self}", "def rename(op_name):\n return type(op_name, (OpConverter,), {})", "def get_alias(self):", "def as_dict(self):\n\n OptiObjFunc_dict = dict()\n OptiObjFunc_dict[\"description\"] = self.description\n if self.func is None:\n OptiObjFunc_dict[\"func\"] = None\n else:\n OptiObjFunc_dict[\"func\"] = [\n 
dumps(self._func[0]).decode(\"ISO-8859-2\"),\n self._func[1],\n ]\n # The class name is added to the dict fordeserialisation purpose\n OptiObjFunc_dict[\"__class__\"] = \"OptiObjFunc\"\n return OptiObjFunc_dict", "def _shortcut(input_feature, residual, conv_name_base=None, bn_name_base=None):\n # Expand channels of shortcut to match residual.\n # Stride appropriately to match residual (width, height)\n # Should be int if network architecture is correctly configured.\n input_shape = K.int_shape(input_feature)\n residual_shape = K.int_shape(residual)\n stride_width = int(round(input_shape[ROW_AXIS] / residual_shape[ROW_AXIS]))\n stride_height = int(round(input_shape[COL_AXIS] / residual_shape[COL_AXIS]))\n equal_channels = input_shape[CHANNEL_AXIS] == residual_shape[CHANNEL_AXIS]\n\n shortcut = input_feature\n # 1 X 1 conv if shape is different. Else identity.\n if stride_width > 1 or stride_height > 1 or not equal_channels:\n # print('reshaping via a convolution...')\n shortcut = Conv2D(\n filters=residual_shape[CHANNEL_AXIS],\n kernel_size=(1, 1),\n strides=(stride_width, stride_height),\n padding=\"valid\",\n kernel_initializer=\"he_normal\",\n kernel_regularizer=l2(0.0001),\n name=conv_name_base,\n )(input_feature)\n shortcut = BatchNormalization(axis=CHANNEL_AXIS, name=bn_name_base)(shortcut)\n\n return add([shortcut, residual])", "def get_page_from_name(name: str) -> Dict:\n name = synonyms_to_names[name]\n wiki_data_index = wiki_data[\"name\"].index(name)\n return {\n \"name\": wiki_data[\"name\"][wiki_data_index],\n \"url\": wiki_data[\"url\"][wiki_data_index],\n \"categories\": wiki_data[\"categories\"][wiki_data_index],\n \"content\": wiki_data[\"content\"][wiki_data_index],\n \"links\": wiki_data[\"links\"][wiki_data_index],\n \"synonyms\": wiki_data[\"synonyms\"][wiki_data_index],\n }", "def slot_mappings(self) -> Dict[Text, Union[Dict, List[Dict]]]:\n\n # return { \"faq_choice\": self.from_entity(\"faq_choice\"),\"faq_question\": self.from_entity(\"faq_question\"), \"faq_text\": [self.from_text()]}\n\n return {\"faq_choice\": [self.from_entity(\"faq_choice\"), self.from_text()], \"faq_text\": [self.from_text(), self.from_entity(entity=\"navigation\")]}", "def dumps(self) -> Dict[str, Any]:\n contents = super().dumps()\n contents[\"name\"] = self.name\n return contents", "def getKey(instance):\n return instance['name']", "def create_namedict(names_excel_path, style=\"shortname [acc-db]\"):\n #################################################################\n # EXTRACT NAMES FROM NAMES EXCEL FILE #\n #################################################################\n df_names = pd.read_excel(names_excel_path, index_col=0)\n # restrict names dict to only that database\n df_names[\"acc\"] = df_names.index\n df_names[\"acc_db\"] = df_names.acc + \"-\" + df_names.database\n df_names.set_index(\"acc_db\", inplace=True, drop=False)\n df_names.index.name = \"acc_db_index\"\n\n # df_names.acc_db_for_figs = df_names.acc_db.replace(\"crystal\", \"X-ray\")\n\n # add old names in index \"e.g. 
Q13563-crystal\", so that they are replaced with new \"X-ray\" names in figs\n xray_row_bool_ser = df_names.acc_db.str.contains(\"X-ray\")\n df_xray = df_names.loc[xray_row_bool_ser == True].copy()\n df_xray.index = df_xray[\"PDB acc\"] + \"-crystal\"\n df_xray[\"acc_db\"] = df_xray[\"PDB acc\"] + \"-\" + df_xray.database\n df_names = pd.concat([df_names.loc[xray_row_bool_ser == False], df_xray])\n\n # df_names = df_names.loc[df_names.database == database]\n if style == \"shortname [acc-db]\":\n df_names[\"label\"] = df_names.shortname + \" [\" + df_names.acc_db + \"]\"\n elif style == \"shortname [acc]\":\n df_names[\"label\"] = df_names.shortname + \" [\" + df_names.acc + \"]\"\n else:\n raise ValueError(\"other styles not implemented\")\n\n namedict = df_names[\"label\"].to_dict()\n return namedict", "def object_graph_key_mapping(checkpoint_path: str) -> Dict[str, str]:\n reader = tf.train.load_checkpoint(checkpoint_path)\n object_graph_string = reader.get_tensor('_CHECKPOINTABLE_OBJECT_GRAPH')\n object_graph_proto = trackable_object_graph_pb2.TrackableObjectGraph()\n object_graph_proto.ParseFromString(object_graph_string)\n names_to_keys = {}\n for node in object_graph_proto.nodes:\n for attribute in node.attributes:\n names_to_keys[attribute.full_name] = attribute.checkpoint_key\n return names_to_keys", "def __init__(self, name):\n self.name = name\n self.registry = weakref.WeakValueDictionary()", "def _attrs_map(self) -> \"dict[int, str]\":\n return {i: attr.name for i, attr in enumerate(self._attrs())}", "def custom_encode(obj):\n if isinstance(obj, DictionaryMethods):\n key = '__Dictionary__'\n return {key: [list(obj), obj.alpha, obj.pat, obj.pat_args,\n obj.auto_fields]}\n elif isinstance(obj, Entry):\n return obj.data\n else:\n raise TypeError(\"obj {!r} of type {}\".format(obj, type(obj)))", "def slot_mappings(self) -> Dict[Text, Union[Dict, List[Dict]]]:\n return {\n \"topic\": [\n self.from_text(),\n ],\n }", "def to_dict(self):\n return {key: getattr(self, key) for key in self.keys}", "def copy_to_dict(self):\n search_hit_dict = {}\n search_hit_dict['Offset'] = self.offset\n search_hit_dict['Filename (inode)'] = self.filename\n search_hit_dict['String'] = self.data\n\n return search_hit_dict", "def dict(self) -> dict():\n\n dict_reg_hive = {}\n\n for _attribute in self.attributes.__dict__.items():\n if isinstance(_attribute[1], str):\n if not True in [_attribute[1].startswith(prefix) for prefix in ['<', 'providers.', 'None']]:\n _attribute_value = getattr(self, _attribute[1])\n dict_reg_hive.update({_attribute[1]: _attribute_value})\n\n return dict_reg_hive", "def to_dict(self) -> Dict:\n _dict = {}\n if hasattr(self, 'name') and self.name is not None:\n _dict['name'] = self.name\n if hasattr(self, 'description') and self.description is not None:\n _dict['description'] = self.description\n if hasattr(self, 'address') and self.address is not None:\n _dict['address'] = self.address\n if hasattr(self, 'enabled') and self.enabled is not None:\n _dict['enabled'] = self.enabled\n return _dict", "def _converttonamespace(o):\n ret = {}\n\n # These things are written directy into the dictionary.\n direct = (numbers.Number, np.number, tuple,\n list, np.ndarray, str)\n\n for key, val in o.__dict__.items():\n\n # Ignore hidden variables\n if key.startswith(\"_\"):\n continue\n # Skip fields that should not be stored\n if isinstance(val, Field) and val.save == False:\n continue\n\n if val is not None and isinstance(val, direct):\n ret[key] = copy.copy(val)\n else:\n ret[key] = 
_converttonamespace(val)\n\n return SimpleNamespace(**ret)", "def upgrade_state_dict_named(self, state_dict, name):\n return state_dict", "def normalize_mapping(mapping):\n if mapping is None:\n return None\n\n _mapping = mapping.to_dict()\n _mapping['name'] = mapping['id']\n return _mapping", "def name_to_dict(s):\n n = {}\n s = s.split()\n n['first_name'] = s[0]\n n['last_name'] = s[1]\n return n", "def to_dict(self) -> Dict[str, Any]:\n return self.__dict__.copy()", "def to_dict(self) -> Dict[str, Any]:\n return {\n \"container_url\": self.container_url,\n \"prefix\": self.prefix\n }", "def to_dict(self) -> dict:\n return dict(\n class_str=f\"{self.class_object.__module__}.{self.class_object.__name__}\",\n run=self.method_str,\n args=self.arg_to_str_list(),\n kwargs=self.kwarg_to_str_dict(),\n )", "def __init__(self, name: str, remap_to: typing.Dict[str, str]):\n super().__init__(name=name)\n self.logger.debug(\"%s.__init__()\" % (self.__class__.__name__))\n self.blackboard = self.attach_blackboard_client()\n self.blackboard.register_key(\n key=\"/foo/bar/wow\",\n access=py_trees.common.Access.WRITE,\n remap_to=remap_to[\"/foo/bar/wow\"],\n )", "def setupShortcuts(self):\r\n # productive\r\n profprint()\r\n macros = (\r\n (\"Ctrl+Return\", self.segmentNeedle),\r\n (\"Ctrl+z\", self.logic.deleteLastNeedle),\r\n (\"Ctrl+y\", self.acceptNeedleTipEstimate),\r\n (\"Ctrl+n\", self.rejectNeedleTipEstimate),\r\n (\"Ctrl+u\", self.acceptNeedleTipEstimateAsNewTempMarker),\r\n )\r\n\r\n for keys, f in macros:\r\n k = qt.QKeySequence(keys)\r\n s = qt.QShortcut(k, slicer.util.mainWindow())\r\n s.connect('activated()', f)\r\n s.connect('activatedAmbiguously()', f)\r\n print \"'%s' -> '%s'\" % (keys, f.__name__)\r\n # convenient for the python console\r\n globals()['nfw'] = nfw = slicer.modules.NeedleFinderWidget\r\n globals()['nfl'] = nfl = slicer.modules.NeedleFinderWidget.logic\r\n print \"nfl -> NeedleFinderLogic\"\r\n print \"nfw -> NeedleFinderWidget\"", "def _make_links(self,\n links: Mapping[str, Union[str, Dict[str, Any]]],\n relationship: Optional[str] = None):\n evaluated_links = {}\n for name, link_payload in links.items():\n evaluated_links[name] = link_payload\n for param, arg in link_payload.items():\n evaluated_links[name][param] = (\n arg(self) if callable(arg) else arg)\n links_factories = self.__links_factories__\n return {\n name: links_factories[self._qualname(name, relationship)](**evaluated_links[name])\n if links_factories.get(self._qualname(name, relationship)) is not None\n else evaluated_links[name]\n for name in evaluated_links\n }", "def name(self):\r\n if self._name_map is None:\r\n self._name_map = {}\r\n for key,value in CursorKind.__dict__.items():\r\n if isinstance(value,CursorKind):\r\n self._name_map[value] = key\r\n return self._name_map[self]", "def copy_forward_mapping(self) -> Dict[str, Set[str]]:\n return deepcopy(self._forward_mapping)", "def to_dict(self):\n ret = {}\n for key in dir(self):\n if key.startswith(\"_\"):\n continue\n\n if key in ['id', 'objects', 'pk', 'STRICT']:\n continue\n\n obj = getattr(self, key)\n if callable(obj):\n continue\n ret[key] = obj\n return ret", "def to_dict(self):\n fields = {}\n for label in self.fields:\n field = getattr(self, label)\n if not field is None:\n fields[label] = field.url\n return fields", "def load_browser_dictionary(self):\n name_dict = {}\n rows = self.cursor.execute(\"SELECT * FROM browser\")\n for row in rows:\n browser_id = int(row[\"id\"])\n browser_name = row[\"name\"]\n name_dict[browser_id] = 
browser_name\n return name_dict", "def cmip6_renaming_dict():\n # I could probably simplify this with a generalized single dict, \n # which has every single possible `wrong` name and then for each model\n # the renaming function just goes through them...\n dim_name_dict = {\n \"AWI-CM-1-1-MR\":{},\n \"BCC-CSM2-MR\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"bnds\":\"bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"vertex\": None,\n 'time_bounds': \"time_bnds\",\n },\n \"BCC-ESM1\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"bnds\":\"bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"vertex\": \"vertex\",\n 'time_bounds': \"time_bnds\",\n },\n \"CAMS-CSM1-0\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n \"vertex\": 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n \"CanESM5\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n \"time_bounds\": \"time_bnds\",\n \"vertex\": \"vertices\",\n },\n \"CanESM5-CanOE\": {\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": 'vertices_longitude',\n \"lat_bounds\": 'vertices_latitude',\n \"vertex\": \"vertices\",\n },\n \"CNRM-CM6-1\": {\n \"x\": [\"x\", 'lon'],\n \"y\": [\"y\", 'lat'],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"bnds\": \"axis_nbounds\",\n \"lev_bounds\": \"lev_bounds\",\n \"lon_bounds\": \"bounds_lon\",\n \"lat_bounds\": \"bounds_lat\",\n 'vertex': \"nvertex\",\n 'time_bounds': \"time_bnds\",\n },\n \"CNRM-ESM2-1\": {\n \"x\": [\"x\", \"lon\"],\n \"y\": [\"y\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bounds\",\n \"lon_bounds\": \"bounds_lon\",\n \"lat_bounds\": \"bounds_lat\",\n \"bnds\":\"axis_nbounds\",\n 'vertex': None,\n 'time_bounds': \"time_bnds\",\n },\n \"E3SM-1-0\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"bnds\":\"bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"time_bounds\":\"time_bounds\",\n 'vertex': None,\n },\n \"E3SM-1-1\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"bnds\":\"bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"time_bounds\":\"time_bounds\",\n 'vertex': None,\n },\n \"E3SM-1-1-ECA\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"bnds\":\"bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"time_bounds\":\"time_bounds\",\n 'vertex': None,\n },\n \"EC-Earth3-LR\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'time_bounds': \"time_bnds\",\n # 'vertex': 'vertices',\n # 'dzt': 'thkcello',\n },\n 
\"EC-Earth3-Veg\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n # 'dzt': 'thkcello',\n },\n \"EC-Earth3\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'time_bounds': \"time_bnds\",\n # 'vertex': 'vertices',\n # 'dzt': 'thkcello',\n },\n \"FGOALS-f3-L\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'time_bounds': \"time_bnds\",\n # 'vertex': 'vertices',\n # 'dzt': 'thkcello',\n },\n \"NICAM16-7S\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"time_bounds\": \"time_bnds\",\n 'vertex': 'vertices',\n },\n \"MIROC-ES2L\": {\n \"x\": [\"x\", 'lon'],\n \"y\": [\"y\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": [\"lev\", \"zlev\"],\n \"lev_bounds\": [\"lev_bnds\", \"zlev_bnds\"],\n \"lon_bounds\": \"x_bnds\",\n \"lat_bounds\": \"y_bnds\",\n \"time_bounds\": \"time_bnds\",\n 'vertex': 'vertices',\n },\n \"MIROC6\": {\n \"x\": [\"x\", 'lon'],\n \"y\": [\"y\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"x_bnds\",\n \"lat_bounds\": \"y_bnds\",\n 'time_bounds': \"time_bnds\",\n },\n \"HadGEM3-GC31-LL\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bounds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'time_bounds': \"time_bnds\",\n },\n \"HadGEM3-GC31-MM\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bounds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'time_bounds': \"time_bnds\",\n },\n \"UKESM1-0-LL\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n \"time_bounds\":\"time_bnds\",\n # 'vertex': 'vertices',\n # 'dzt': 'thkcello',\n },\n 'GISS-E2-2-G': { \n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': None,\n 'time_bounds': \"time_bnds\",\n },\n \"GISS-E2-1-G-CC\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': None,\n 'time_bounds': \"time_bnds\",\n },\n \"GISS-E2-1-G\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': None,\n 'time_bounds': \"time_bnds\",\n },\n \"GISS-E2-1-H\": {\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": 
\"lat_bnds\",\n 'vertex': None,\n 'time_bounds': \"time_bnds\",\n },\n \"CESM1-1-CAM5-CMIP5\": {\n \"x\": [\"nlon\", \"lon\"],\n \"y\": [\"nlat\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"bnds\":\"d2\",\n \"time_bounds\":\"time_bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': 'vertices',\n },\n \"CESM2-WACCM\": {\n \"x\": [\"nlon\", \"lon\"],\n \"y\": [\"nlat\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"bnds\":\"d2\",\n \"time_bounds\":\"time_bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': 'vertices',\n },\n \"CESM2-WACCM-FV2\": {\n \"x\": [\"nlon\", \"lon\"],\n \"y\": [\"nlat\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"bnds\":\"d2\",\n \"time_bounds\":\"time_bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': 'vertices',\n },\n \"CESM2\": {\n \"x\": [\"nlon\", \"lon\"],\n \"y\": [\"nlat\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"bnds\":'d2',\n \"time_bounds\":\"time_bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': 'vertices',\n },\n \"CESM2-FV2\": {\n \"x\": [\"nlon\", \"lon\"],\n \"y\": [\"nlat\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"bnds\":'d2',\n \"time_bounds\":\"time_bnds\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': 'vertices',\n },\n \"GFDL-CM4\": {\n \"x\": [\"x\",\"lon\"],\n \"y\": [\"y\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"time_bounds\": \"time_bnds\",\n # 'vertex': 'vertex',\n # 'dzt': 'thkcello',\n },\n \"GFDL-OM4p5B\": {\n \"x\": [\"x\",\"lon\"],\n \"y\": [\"y\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"time_bounds\": \"time_bnds\",\n # 'vertex': 'vertex',\n # 'dzt': 'thkcello',\n },\n \"GFDL-ESM4\": {\n \"x\": [\"x\",\"lon\"],\n \"y\": [\"y\", \"lat\"],\n \"lon\": \"lon\",\n \"lat\": \"lat\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n \"time_bounds\": \"time_bnds\",\n # 'vertex': 'vertex',\n # 'dzt': 'thkcello',\n },\n \"NESM3\": {\n \"x\": ['i', \"lon\"],\n \"y\": ['j', \"lat\"],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n # 'dzt': 'thkcello',\n },\n \"MRI-ESM2-0\": {\n \"x\": ['x', \"lon\"],\n \"y\": ['y', \"lat\"],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"bnds\":'bnds',\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": [\"x_bnds\", 'lon_bnds'],\n \"lat_bounds\": [\"y_bnds\", 'lat_bnds'],\n \"time_bounds\": \"time_bnds\",\n 'vertex': 'vertices',\n },\n \"SAM0-UNICON\": {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": \"longitude\",\n \"lat\": \"latitude\",\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n # 'dzt': 'thkcello',\n },\n 
\"MCM-UA-1-0\": {\n \"x\": \"longitude\",\n \"y\": \"latitude\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'time_bounds': \"time_bnds\",\n # 'vertex': 'vertices',\n # 'dzt': 'thkcello',\n }, \n 'IPSL-CM6A-LR': {\n \"x\": ['x', \"lon\"],\n \"y\": ['y', \"lat\"],\n \"lon\": 'nav_lon',\n \"lat\": 'nav_lat',\n \"lev\": [\"lev\",\"deptht\", \"olevel\"],\n \"lev_bounds\": [\"lev_bounds\", \"deptht_bounds\",'olevel_bounds'],\n \"lon_bounds\": \"bounds_nav_lon\",\n \"lat_bounds\": \"bounds_nav_lat\",\n 'vertex': 'nvertex',\n \"bnds\":\"axis_nbounds\",\n 'time_bounds': \"time_bnds\",\n # 'dzt': 'thkcello',\n },\n 'NorCPM1': {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": None,\n \"lat_bounds\": None,\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'NorESM1-F': {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": 'vertices_longitude',\n \"lat_bounds\": 'vertices_latitude',\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'NorESM2-LM': {\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": 'vertices_longitude',\n \"lat_bounds\": 'vertices_latitude',\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'NorESM2-MM': {\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\", # i leave this here because the names are the same as for the other Nor models.\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": 'vertices_longitude',\n \"lat_bounds\": 'vertices_latitude',\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n \n 'MPI-ESM1-2-HR': {\n \"x\": [\"i\", 'lon'],\n \"y\": [\"j\", 'lat'],\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": 'vertices_longitude',\n \"lat_bounds\": 'vertices_latitude',\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'MPI-ESM1-2-LR': {\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": 'vertices_longitude',\n \"lat_bounds\": 'vertices_latitude',\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'MPI-ESM-1-2-HAM': {\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": 'vertices_longitude',\n \"lat_bounds\": 'vertices_latitude',\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'CNRM-CM6-1-HR': {\n \"x\": \"x\",\n \"y\": \"y\",\n \"lon\": 'lon',\n \"lat\": 'lat',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bounds\",\n \"lon_bounds\": \"bounds_lon\",\n \"lat_bounds\": \"bounds_lat\",\n 'vertex': None,\n 'time_bounds': \"time_bounds\",\n },\n 'FIO-ESM-2-0': {\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"vertices_longitude\",\n \"lat_bounds\": \"vertices_latitude\",\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'ACCESS-ESM1-5': {\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n 
\"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"vertices_longitude\",\n \"lat_bounds\": \"vertices_latitude\",\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'ACCESS-CM2': {\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"vertices_longitude\",\n \"lat_bounds\": \"vertices_latitude\",\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'INM-CM4-8': { # this is a guess.\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': None,\n 'time_bounds': \"time_bnds\",\n },\n 'INM-CM5-0': { # this is a guess.\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": None,\n \"lat\": None,\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"lon_bnds\",\n \"lat_bounds\": \"lat_bnds\",\n 'vertex': None,\n 'time_bounds': \"time_bnds\",\n },\n 'MRI-ESM2-0':{\n \"x\": \"x\",\n \"y\": \"y\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n \"lev\": \"lev\",\n \"lev_bounds\": \"lev_bnds\",\n# \"lon_bounds\": 'x_bnds',\n# \"lat_bounds\": 'y_bnds',\n# 'vertex': None, # this is a mess. there is yet another convention. Will have to deal with this once I wrap xgcm into here.\n 'time_bounds': \"time_bnds\",\n },\n 'CIESM': { # this is a guess.\n \"x\": \"i\",\n \"y\": \"j\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n# \"lev\": \"lev\", # no 3d data available as of now\n# \"lev_bounds\": \"lev_bnds\",\n \"lon_bounds\": \"vertices_longitude\",\n \"lat_bounds\": \"vertices_latitude\",\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n 'KACE-1-0-G': { # this is a guess.\n \"x\": \"lon\",\n \"y\": \"lat\",\n \"lon\": 'longitude',\n \"lat\": 'latitude',\n# \"lev\": \"lev\", # no 3d data available as of now\n# \"lev_bounds\": \"lev_bnds\",\n# \"lon_bounds\": \"vertices_longitude\",\n# \"lat_bounds\": \"vertices_latitude\",\n# \"lon_bounds\": \"vertices_longitude\",\n# \"lat_bounds\": \"vertices_latitude\",\n 'vertex': 'vertices',\n 'time_bounds': \"time_bnds\",\n },\n \n }\n # cast all str into lists\n for model in dim_name_dict.keys():\n for field in dim_name_dict[model].keys():\n if isinstance(dim_name_dict[model][field], str) or dim_name_dict[model][field] is None :\n dim_name_dict[model][field] = [dim_name_dict[model][field]]\n# add 'lon' and 'lat' as possible logical indicies for all models. This should take care of all regridded ocean output and all atmosphere models.\n if 'x' in dim_name_dict[model].keys():\n if not 'lon' in dim_name_dict[model]['x']:\n dim_name_dict[model]['x'].append('lon')\n \n if 'y' in dim_name_dict[model].keys():\n if not 'lat' in dim_name_dict[model]['y']:\n dim_name_dict[model]['y'].append('lat') \n return dim_name_dict" ]
[ "0.5776113", "0.57499915", "0.5587667", "0.556973", "0.5420494", "0.53735536", "0.5343546", "0.5342723", "0.5337498", "0.53041375", "0.5202565", "0.50983745", "0.5079809", "0.50716364", "0.5070442", "0.50446194", "0.5030284", "0.5026418", "0.5009997", "0.4993586", "0.49922198", "0.4984836", "0.49774665", "0.49724483", "0.49664608", "0.49594846", "0.49438715", "0.4925207", "0.49244875", "0.4921472", "0.49189225", "0.49107236", "0.4906937", "0.48950142", "0.48904", "0.488863", "0.48860794", "0.48684716", "0.48670316", "0.48579657", "0.48571882", "0.48571038", "0.48560968", "0.48396906", "0.4836403", "0.48310557", "0.48289186", "0.48172534", "0.48111367", "0.4803067", "0.47974882", "0.47845772", "0.4783727", "0.47800493", "0.47782725", "0.4773475", "0.47676748", "0.47667748", "0.47611842", "0.47605693", "0.4756737", "0.47544497", "0.47481158", "0.47385594", "0.47341794", "0.47291228", "0.4716439", "0.47160795", "0.4696885", "0.46943992", "0.46933225", "0.46752492", "0.46712166", "0.46618122", "0.4655131", "0.46535093", "0.46501458", "0.4649622", "0.4646153", "0.46433014", "0.464316", "0.46425778", "0.4640197", "0.46385503", "0.46364877", "0.46275783", "0.4625342", "0.46217218", "0.46209294", "0.46202198", "0.46154648", "0.46145126", "0.46122104", "0.46115708", "0.46094283", "0.46090722", "0.46032065", "0.46028638", "0.4600376", "0.45981777", "0.4596831" ]
0.0
-1
Convert list of GeneralNames to list of prefixed strings.
def extract_gnames(self, ext): res = [] for gn in ext: if isinstance(gn, x509.RFC822Name): res.append('email:' + as_unicode(gn.value)) elif isinstance(gn, x509.DNSName): res.append('dns:' + as_unicode(gn.value)) elif isinstance(gn, x509.UniformResourceIdentifier): res.append('uri:' + as_unicode(gn.value)) elif isinstance(gn, x509.IPAddress): res.append('ip:' + str(gn.value)) elif isinstance(gn, x509.DirectoryName): val = self.extract_name(gn.value) res.append('dn:' + render_name(val)) else: raise InvalidCertificate("Unsupported subjectAltName type: %s" % (gn,)) return res
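Editorial aside — a minimal usage sketch for the extract_gnames document above. Here "tool" stands in for an instance of the (unnamed) certificate-handling class the method belongs to, and the sketch assumes its as_unicode helper passes plain str values through unchanged; the sample values are invented.

import ipaddress
from cryptography import x509

# tool: hypothetical instance of the class that defines extract_gnames
gnames = [
    x509.DNSName("example.com"),
    x509.RFC822Name("admin@example.com"),
    x509.IPAddress(ipaddress.ip_address("192.0.2.1")),
]
print(tool.extract_gnames(gnames))
# -> ['dns:example.com', 'email:admin@example.com', 'ip:192.0.2.1']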
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def original_names(names):\n if len(names) > 0 and isinstance(names[0], (list, tuple)):\n names = [x[0] for x in names]\n\n return [re.sub('_+$', '', x) for x in names]", "def standardize_many(self, names: list[str]) -> list[str]:\n assert type(names) == list\n return [self.standardize(name) for name in names]", "def list_to_names(names):\n names_list = []\n for n in names:\n names_list.append(names[n].details['name'])\n return names_list", "def nameList(self):\r\n return [self.name.lower(), self.code] + self._otherNames", "def extract_full_names(people):\n result = []\n \n for lst in names:\n x = ''\n for name in lst.values():\n x += ' ' + name \n x = x[1:] \n result.append(x)\n return result", "def load_gnames(self, gname_list):\n gnames = []\n for alt in gname_list:\n if ':' not in alt:\n raise InvalidCertificate(\"Invalid gname: %s\" % (alt,))\n t, val = alt.split(':', 1)\n t = t.lower().strip()\n val = val.strip()\n if t == 'dn':\n gn = x509.DirectoryName(self.load_name(parse_dn(val)))\n elif t == 'dns':\n gn = x509.DNSName(val)\n elif t == 'email':\n gn = x509.RFC822Name(val)\n elif t == 'uri':\n gn = x509.UniformResourceIdentifier(val)\n elif t == 'ip':\n if val.find(':') >= 0:\n gn = x509.IPAddress(ipaddress.IPv6Address(val))\n else:\n gn = x509.IPAddress(ipaddress.IPv4Address(val))\n elif t == 'dn':\n gn = x509.DirectoryName(self.load_name(parse_dn(val)))\n elif t == 'net':\n if val.find(':') >= 0:\n gn = x509.IPAddress(ipaddress.IPv6Network(val))\n else:\n gn = x509.IPAddress(ipaddress.IPv4Network(val))\n else:\n raise Exception('Invalid GeneralName: ' + alt)\n gnames.append(gn)\n return gnames", "def full_names(self) -> List[str]:\n self.names = [\n \".\".join(prod)\n for prod in product(*self._namespaces, self.terminals)\n ]\n return self.names", "def get_names_short(self):\r\n return [p.get_name() for p in self.people]", "def add_prefix_to_list_items(prefix: str, items: list) -> list:\r\n new_items = []\r\n for item in items:\r\n new_items.append(f\"{prefix}{str(item)}\")\r\n return new_items", "def get_short_names(self) -> List[str]:\n result = []\n for elements in self._get_results_list():\n result.append(elements[0])\n return result", "def listit(list, prefix):\n\n l = []\n for x in list:\n l.append(prefix+(x.strip()).rstrip('.fits'))\n return \",\".join(l)", "def TransformNames(self) -> _n_2_t_0[str]:", "def _toStr(toList):\n\n names = [formataddr(i) for i in zip(*toList)]\n return ', '.join(names)", "def itemnames():\n g = ['KIS_NA_39', 'VII_57', 'MX_48', 'MX_56', 'KIS_NA_42', 'VII_54',\n 'MX_S_48', 'MX_S_52', 'MX_52', 'KIS_NA_45', 'KIS_NA_51', 'MIP_45',\n 'MIP_49', 'MIP_52', 'MIP_plus_48', 'MIP_plus_51', 'MX_42', 'MX_45',\n 'MIP_G_42', 'KIS_42', 'KIS_NA_48']\n return(g)", "def rem_str(prelist,names):\n \n for prefix in prelist:\n names=[name.replace(prefix,'') for name in names]\n \n return names", "def rem_str(prelist,names):\n\n for prefix in prelist:\n names=[name.replace(prefix,'') for name in names]\n\n return names", "def formatlist(input_list):\n\n output_list = []\n for item in input_list:\n item = str(item)\n item = item.replace(\" \", \"_\")\n output_list.append(item)\n return output_list", "def prefixCombiner(prefix, itemlist, glue=''):\n result = []\n for item in itemlist:\n result.append(prefix + glue + item)\n return result", "def names(self) -> list[str]:", "def property_list_to_str(properties: th.PropertiesList) -> List[str]:\n return [name for (name, prop) in properties.items()]", "def names(cls) -> List[str]:", "def _repair_names_universal(\n names: 
Iterable[str], quiet: bool = False, base0_: bool = None\n) -> List[str]:\n min_names = _repair_names_minimal(names)\n neat_names = [re.sub(r\"[^\\w]\", \"_\", name) for name in min_names]\n new_names = _repair_names_unique(\n neat_names,\n quiet=True,\n sanitizer=lambda name: (\n f\"_{name}\"\n if keyword.iskeyword(name) or (name and name[0].isdigit())\n else name\n ),\n base0_=base0_,\n )\n if not quiet:\n changed_names = [\n (orig_name, new_name)\n for orig_name, new_name in zip(names, new_names)\n if orig_name != new_name\n ]\n _log_changed_names(changed_names)\n return new_names", "def generate_list_of_names(self):\n names = [donor._full_name for donor in self.donor_list]\n name_selection = \"\\n\".join(\n [\"{}\"] * len(self.donor_list)).format(*names)\n return name_selection", "def drug_names_on_drug_list(drug_list):\n return [dl[\"Drug (brand name)\"] for dl in drug_list]", "def prefix_all(value, LL):\n return [[value] + L for L in LL]", "def _groupNamesToList(settings):\n return [getattr(GroupName, val) for val in settings.dhGroups]", "def getNames(self) -> List[unicode]:\n ...", "def format_labels(_labels):\n _ret = []\n if isinstance(_labels, str):\n # put in a list if the label is a string.\n _ret = [_labels]\n elif isinstance(_labels, dict):\n for _key, _item in _labels.items():\n _ret.append(_key.strip().replace(\" \", \"-\").replace(\"_\", \"-\"))\n elif isinstance(_labels, list) or isinstance(_labels, tuple):\n for _item in _labels:\n _ret.append(_item.strip().replace(\" \", \"-\").replace(\"_\", \"-\"))\n return _ret", "def convertListToString(list):\n return re.sub(r'[^\\w ]', '', str(list))", "def str_transform_list(L):\n return [str(x) for x in L]", "def genSufList():\n global gConst\n\n sufChrList = []\n for suffix in gConst['picSufList']:\n for c in suffix:\n sufChrList.append(c)\n\n sufChrList = crifanLib.crifanList.uniqueList(sufChrList)\n # sufChrList = uniqueList(sufChrList)\n sufChrList.sort()\n joinedSuf = ''.join(sufChrList)\n swappedSuf = joinedSuf.swapcase()\n wholeSuf = joinedSuf + swappedSuf\n\n return wholeSuf", "def _repair_names_unique(\n names: Sequence[str],\n quiet: bool = False,\n sanitizer: Callable = None,\n base0_: bool = None,\n) -> List[str]:\n base = int(not base0_)\n min_names = _repair_names_minimal(names)\n neat_names = [\n re.sub(r\"(?:(?<!_)_{1,2}\\d+|(?<!_)__)+$\", \"\", name)\n for name in min_names\n ]\n if callable(sanitizer):\n neat_names = [sanitizer(name) for name in neat_names]\n\n new_names = []\n changed_names = []\n for i, name in enumerate(neat_names):\n if neat_names.count(name) > 1 or name == \"\":\n name = f\"{name}__{i + base}\"\n if name != names[i]:\n changed_names.append((names[i], name))\n new_names.append(name)\n if not quiet:\n _log_changed_names(changed_names)\n return new_names", "def replace_special_characters_in_list(self, full_list):\n return [n.replace(':','%3A') for n in full_list]", "def extract_names(collection):\n return (\n '[{}]'.format(', '.join(map(repr, groups[n].entity_names)))\n if n in groups else repr(n) for n in collection\n )", "def clean_names(names):\n\n cleaned_names = []\n for name in names:\n name_parts = [n_part for n_part in name.split() if '.' 
not in n_part]\n cleaned_names.append(\" \".join(name_parts))\n\n return cleaned_names", "def transform(self, nameList):\n return {name: self.transformSingle(name) for name in nameList}", "def extract_names(collection):\n return map(repr, collection)", "def Student_names(l:list)->list:\n result=[]\n for s in l:\n result.append(s.name)\n return result", "def transform_prefix(filenames, prefix_old, prefix_new):\n\n new_filenames = set([])\n len_prefix_old = len(prefix_old)\n # loop over the list of files and remove the prefix\n for name in filenames:\n name = name[len_prefix_old:]\n new_filenames.add(prefix_new + name)\n\n\n return new_filenames", "def _list_of_availability_strings():\n names = [availability.name for availability in Availability]\n return names", "def format_list(list):\n return \" \".join(str(tok) for tok in list)", "def get_aliases_string(trembl_list):\n aliases_list = []\n\n for row in trembl_list:\n psimi_trembl = \"trembl:\" + row[1]\n aliases_list.append(psimi_trembl)\n\n return \"|\".join(aliases_list)", "def personas(self, pretty=True, sort=True):\n names = list(self.name2base)\n if pretty: names = [self.process_name(name, True) for name in names]\n if sort: names = sorted(names)\n return names", "def compound_names(self) -> List[str]:\n return None", "def names(self):\n if isinstance(self.name, string_types):\n return [self.name]\n else:\n return list(self.name)", "def add_prefix(prefix = \"Peptides\"):\n var_list = gen_cell_lines_states_replicates()\n prefix = prefix\n res_list = []\n for i in var_list:\n unit_str = prefix + \" \"\n unit_str += i\n res_list.append(unit_str)\n return res_list", "def preprocess_data(extracted_data: List[Tuple[str, str]]) -> List[str]:\n return [f'__label__{data[0]} {clean_formatting(data[1])}' for data in extracted_data]", "def get_nice_names(self) -> List[str]:\n result = []\n for elements in self._get_results_list():\n result.append(elements[1])\n return result", "def translate_names(critter_names: List[str], locale: str) -> List[str]:\n if locale in ['auto', 'en-us']:\n return critter_names\n\n translation_path = os.path.join('critters', 'translations.json')\n with open(translation_path, encoding='utf-8') as fp:\n translations = json.load(fp)\n return [translations[name][locale] for name in critter_names]", "def buildstrings(self):\n slist = []\n if 0 == len(self._pctnumbers):\n slist.append(self.buildonestring('NNNN'))\n else:\n for pctnumber in sorted(self._pctnumbers):\n slist.append(self.buildonestring(pctnumber))\n\n return slist", "def _addPrefixes(data):\n prevTags = None\n newData = []\n\n for n, (token, tags) in enumerate(data):\n\n newTags = []\n\n for t in tags:\n p = \"B\" if ((prevTags is None) or (t not in prevTags)) else \"I\"\n newTags.append(\"%s-%s\" % (p, t))\n\n newData.append((token, newTags))\n prevTags = tags\n\n return newData", "def beginsWithSingleUnderscore( inList ):\n ensureInstance( inList, list, level=2 )\n for anElement in inList:\n assert isinstance( anElement, str ), \"inList must be string\"\n \n outList= []\n \n for anElement in inList:\n if len( anElement ) >= 2 and anElement[0] == \"_\" and anElement[1] != \"_\":\n outList.append( anElement )\n \n return outList", "def toStrList(values, precision=None):\n\treturn list(map(lambda va: toStr(va, precision), values))", "def format(self):\n\n try:\n return sorted(list(set(self.main_list)), key=str.lower)\n except TypeError:\n return self.main_list", "def encode_san_dns_names(self, san):\n dns_names = []\n for dns_name in san:\n 
dns_names.append(x509.DNSName(dns_name))\n return dns_names", "def names_for(self, name):\n names = [\"%s.%s\" % (self.package, name)]\n if self.prefix:\n names.append(\"%s.%s\" % (self.prefix, names[0]))\n return names", "def createtext(lst):\n newlst = []\n for item in lst:\n item = item.replace(\"_!\",\"\")\n newlst.append(item)\n text = ' '.join(newlst)\n # Lower-casing\n return text.lower()", "async def process_prefix_list(\n guild: disnake.Guild,\n ctx: commands.Context = None,\n inter: AppCmdInter = None,\n allowed_mentions=None,\n):\n await create_guild_model(guild)\n guild = await Guild.get(guild.id)\n msg = f\"The following are the custom prefixes for {guild.name}:\\n\" + \", \".join(\n guild.prefixes\n )\n await send_message(msg=msg, ctx=ctx, inter=inter, allowed_mentions=allowed_mentions)", "def _clean_budget_names(recarray, names):\n newnames = []\n mbnames = [\"TOTAL_IN\", \"TOTAL_OUT\", \"IN-OUT\", \"PERCENT_DISCREPANCY\"]\n for name in names:\n if name in mbnames:\n newnames.append(name)\n elif (\n not name.startswith(\"FROM_\")\n and not name.startswith(\"TO_\")\n and not name.endswith(\"_IN\")\n and not name.endswith(\"_OUT\")\n ):\n newname_in = \"FROM_\" + name.upper()\n newname_out = \"TO_\" + name.upper()\n if newname_in in recarray[\"name\"]:\n newnames.append(newname_in)\n if newname_out in recarray[\"name\"]:\n newnames.append(newname_out)\n else:\n if name in recarray[\"name\"]:\n newnames.append(name)\n return newnames", "def format_hex(self, list_converted):\n dict_hex = {10: 'A', 11: 'B', 12: 'C', 13: 'D', 14: 'E', 15: 'F'}\n list_converted = [dict_hex[n] if n in dict_hex.keys() else str(n) for n in list_converted]\n return list_converted", "def make_label_names(name_lsit):\n\n hover_label_names = []\n for x in range(len(name_lsit)):\n temp1 = name_lsit[x]\n hover_label_names.append(temp1)\n\n return hover_label_names", "def get_exchanges_short_names() -> List:\n shorts = get_all_exchange_short_names()\n\n return shorts", "def player_names(players):\r\n string = ''\r\n for p in players:\r\n string = string + p.name + ', '\r\n return string", "def _convertListToString(self, list_of_objects):\n return (';').join(list_of_objects)", "def disambiguate(names: list[str], mark: str = \"1\") -> list[str]:\n names_seen = set()\n new_names = []\n for name in names:\n new_name = name\n while new_name in names_seen:\n new_name += mark\n new_names.append(new_name)\n names_seen.add(new_name)\n\n return new_names", "def get_short_currencies_names():\n short_names = [[x, cur_dict[x][1]] for x in cur_dict]\n return short_names\n # return [['Bitcoin','BTC'], ['Litecoin', 'LTC']]", "def get_list_as_str(list_to_convert):\n return \", \".join([\"'{}'\".format(list_item) for list_item in list_to_convert])", "def unicode_list_to_str(u_code_list): #This is just a function for me. 
Has nothing to do with flask or anything, okay?\n out_list = \"\"\n for item in u_code_list:\n out_list = out_list + str(item) + \"-\"\n return out_list.rstrip(\"-\") #removes the extra '-' (i.e 2-3-4-1-)", "def format_troops(troops_raw: list):\n troops = set()\n for troop in troops_raw:\n troop = troop.strip()\n troop = troop.replace(\" \", \"_\")\n troop += \".lua\"\n\n if troop != \".lua\":\n troops.add(troop)\n\n return troops", "def generate_name(path_list):\n name = path_list[0]\n for item in path_list[1:]:\n name += \"[\" + item + \"]\"\n return name", "def coerce_class_names(classes):\n return [getattr(val, 'registry_name', val) for val in classes] \\\n if hasattr(classes, '__iter__') and not isinstance(classes, str) \\\n else getattr(classes, 'registry_name', classes)", "def furanose_names(self):\n output = set()\n for item in self.monomers():\n if item in self.furanose_fac:\n output.add(self.furanose_fac[item][\"name\"])\n return list(output)", "def currentAntennaNames(carmaOnly=False) :\n a=s.getAntennaAssignments()\n namelist = []\n for i in a:\n cname = i.carmaAntennaName\n tname = i.typedAntennaName\n if (carmaOnly) :\n names = i.carmaAntennaName\n else :\n names = \"%s(%s)\" %(cname,tname)\n namelist.append(names)\n return namelist", "def keep_lowercase(str_list):", "def lower(self, text_list):\n return [text.lower() for text in text_list]", "def donor_names():\n names = list()\n for name in donor_db:\n names = names + [name[0]]\n return names", "def get_names_from_full_objects(objects):\n return [obj.name for obj in objects]", "def getFeatureNames(self):\n feature_names = super().getFeatureNames()\n feature_names.extend([\"f101\", \"f102\", \"f105\", \"fNum\", \"fCapStart\", \"fCapNoStart\"])\n return feature_names", "def names(self) -> List:\n ...", "def getPeopleNames(the_list):\n new_list = []\n if type(the_list) == list:\n for person in the_list:\n if person['@type'] == \"Person\":\n new_list.append(person['name'])\n else:\n new_list.append(the_list['name'])\n return new_list", "def parse_genres(genres):\n\tgenre_list = []\n\tfor genre in genres:\n\t\tgenre_list.append(genre.name)\n\n\treturn \", \".join(genre_list)", "def clean_names_list(names):\n pure_names = []\n nan = re.compile('nan', re.IGNORECASE)\n title = re.compile('surname', re.IGNORECASE)\n for name in names:\n if nan.search(name):\n continue\n elif title.search(name):\n continue\n else:\n pure_names.append(name)\n return pure_names", "def get_seq_names(self) -> List[str]:\n return [seq.Name.lower() for seq in self.Sequencers]", "def nametitles(cls) -> t.List[NameTitle]:\n return [label for label in cls.values() if isinstance(label, tuple)]", "def extract_dataset_names(list_of_files, prefix = \"\", suffix = \"\"):\r\n dataset_names = []\r\n for filename in list_of_files:\r\n dataname = re.sub(\"(.*?)(\\.)(\"+prefix+\")(.*)(\"+suffix+\")\", \"\\\\4\", filename)\r\n dataset_names.append(dataname)\r\n assert dataset_names != [], \"dataset_names did not populate\"\r\n return dataset_names", "def build_frequency_list(name_list):\n analyzer = build_analyzer()\n char_list = []\n for name in name_list:\n char_list += analyzer(name)\n return char_list", "def nsrGenera(taxonList, synonymList):\r\n species = list(filter(None, sorted(taxonList + synonymList)))\r\n generaList = [i.split()[0] for i in species]\r\n generaList = list(dict.fromkeys(generaList))\r\n return generaList", "def process_names( names ):\n\tp_list = []\n\tfor i in xrange( len( names ) ):\n\t\t#print i\n\t\tp_list.append( str(i) + \"__\" + names[i] 
)\n\n\tRV = \";\".join(p_list)\n\treturn( RV )", "def _cls_repr(self):\n prefixes = []\n for k in self.names:\n # list only params with not default values\n if self[k].isDefault:\n continue\n prefixes.append(\"%s=%s\" % (k, self[k].value))\n return prefixes", "def to_lower(self, word_list):\n return [word.lower() for word in word_list]", "def name_get(self):\n result = []\n for r in self:\n result.append((r.id, u\"%s %s\" % ('PO', r.name)))\n return result", "def reformatList( listOfPaths):\n newList = []\n first = True\n for seg in listOfPaths: \n newList += seg.asSVGCommand(first)\n first = False\n return newList", "def get_name_list(msh, varname):\n return [str(chartostring(v)) for v in msh.variables[varname]]", "def process_list(a_list: list):\n\n return ', '.join(str(s) for s in a_list) if a_list else Presenter.DEFAULT", "def create_fixer_names(fixes, nofixes):\n # Taken from lib2to3.main:\n fixer_pkg = 'lib2to3.fixes'\n avail_fixes = set(refactor.get_fixers_from_package(fixer_pkg))\n unwanted_fixes = set(fixer_pkg + \".fix_\" + fix for fix in nofixes)\n explicit = set()\n if fixes:\n all_present = False\n for fix in fixes:\n if fix == \"all\":\n all_present = True\n else:\n explicit.add(fixer_pkg + \".fix_\" + fix)\n requested = avail_fixes.union(explicit) if all_present else explicit\n else:\n requested = avail_fixes.union(explicit)\n fixer_names = requested.difference(unwanted_fixes)\n return fixer_names", "def _to_cc_list(collection):\n return \"{\" + \", \".join(collection) + \"}\"", "def create_list_string(list_):\n return f\"[{' '.join(list_)}]\"", "def dps_string_list(dps_data):\n return [f\"{id} (value: {value})\" for id, value in dps_data.items()]", "def lowercase(nameslist):\n lowercasenames = list(filter(lambda x: x == x.lower(), nameslist))\n #print(f\"No. of lowercase names: {len(lowercasenames)}\")\n return lowercasenames", "def __lettersToString(self, words):\r\n \r\n li = []\r\n \r\n for word in words:\r\n li.append(\"\".join(word))\r\n \r\n return li" ]
[ "0.6417263", "0.6408999", "0.6287097", "0.61460066", "0.6118145", "0.6083461", "0.60616726", "0.60493654", "0.6008381", "0.5945094", "0.59157145", "0.59041774", "0.5835183", "0.5815753", "0.5810204", "0.57882947", "0.57405", "0.5714124", "0.568949", "0.56647664", "0.56575066", "0.5646048", "0.5628689", "0.5613883", "0.5592374", "0.55880946", "0.55765015", "0.55219173", "0.55033726", "0.5491321", "0.54560256", "0.5444473", "0.543409", "0.54041433", "0.5399978", "0.5396138", "0.538322", "0.5380528", "0.5378647", "0.53720653", "0.5370766", "0.53544694", "0.5314247", "0.5302641", "0.5301046", "0.5294193", "0.5292585", "0.52886546", "0.52885383", "0.5276754", "0.52698445", "0.52581066", "0.52438647", "0.5237879", "0.51990783", "0.5193352", "0.51915467", "0.5190136", "0.51886547", "0.5188595", "0.51798207", "0.5173082", "0.5170506", "0.51682264", "0.516585", "0.51517326", "0.514575", "0.51408386", "0.51373094", "0.5134907", "0.51337063", "0.5124091", "0.5123089", "0.51229113", "0.51179284", "0.5115619", "0.5110955", "0.5110171", "0.50991493", "0.5096572", "0.50888747", "0.50880206", "0.50836277", "0.50763047", "0.50636107", "0.5061043", "0.50577354", "0.5057615", "0.505109", "0.5051072", "0.504815", "0.5046907", "0.5045407", "0.50435585", "0.50367373", "0.50333846", "0.50293845", "0.5028624", "0.5021489", "0.5017644" ]
0.54241467
33
Create Name object from subject's DistinguishedName.
def load_name(self, name_att_list): attlist = [] got = set() for k, v in name_att_list: if k in got and k not in DN_ALLOW_MULTIPLE: raise InvalidCertificate("Multiple Name keys not allowed: %s" % (k,)) oid = DN_CODE_TO_OID[k] n = x509.NameAttribute(oid, as_unicode(v)) attlist.append(n) return x509.Name(attlist)
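Editorial aside — a minimal sketch of driving the load_name document above. It assumes the module-level DN_CODE_TO_OID mapping it references accepts short RDN codes such as 'CN', 'O' and 'C' (as the related snippets in this record suggest), and "tool" is again a hypothetical instance of the surrounding class.

# tool: hypothetical instance of the class that defines load_name
subject = tool.load_name([("CN", "example.com"), ("O", "Example Org"), ("C", "US")])
# the x509.Name is built in list order; rfc4514_string() renders it reversed
print(subject.rfc4514_string())  # -> 'C=US,O=Example Org,CN=example.com'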
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create(subdomain, subject_type_or_type_name, subject_name, author):\n return Subject(key_name='%s:%s' % (subdomain, subject_name),\n type=get_name(subject_type_or_type_name), author=author)", "def get_name(self):\n return self.load_name(self.subject)", "def create_subject(name=\"Basket Weaving\"):\n subj = Subject(name=name)\n subj.save()\n return subj", "def create_domain_name(self, name):\n return (\"%s.%s.%s\" % (name, \"net\", self.domain)).lower()", "def get_subject(self):\n ri = self.get_request_info()\n if ri['subject'] is None:\n ri['subject'] = None\n # setup first RDN sequence\n ri['subject'][0] = None\n\n subject = ri['subject'][0]\n return name.X509Name(subject)", "def generate_x509_name(self, cn):\n name_attributes = [\n x509.NameAttribute(NameOID.COMMON_NAME, cn),\n ]\n if self.settings['csr_country_name']:\n name_attributes.append(\n x509.NameAttribute(\n NameOID.COUNTRY_NAME,\n self.settings['csr_country_name'],\n )\n )\n if self.settings['csr_state_or_province_name']:\n name_attributes.append(\n x509.NameAttribute(\n NameOID.STATE_OR_PROVINCE_NAME,\n self.settings['csr_state_or_province_name'],\n )\n )\n if self.settings['csr_locality_name']:\n name_attributes.append(\n x509.NameAttribute(\n NameOID.LOCALITY_NAME,\n self.settings['csr_locality_name'],\n )\n )\n if self.settings['csr_organization_name']:\n name_attributes.append(\n x509.NameAttribute(\n NameOID.ORGANIZATION_NAME,\n self.settings['csr_organization_name'],\n )\n )\n return x509.Name(name_attributes)", "def subject(self, value):\n\n is_dict = isinstance(value, dict)\n if not isinstance(value, x509.Name) and not is_dict:\n raise TypeError(_pretty_message(\n '''\n subject must be an instance of asn1crypto.x509.Name or a dict,\n not %s\n ''',\n _type_name(value)\n ))\n\n if is_dict:\n value = x509.Name.build(value)\n\n self._subject = value", "def create_name (self):\n return self.create_topic().create_name('Name')", "def subject_name(self, subject_name):\r\n\r\n self._subject_name = subject_name", "def resolve_name(obj, _):\n return obj.name.decode()", "def convert_x509_name(name):\n types = {\n 'country_name': 'C',\n 'state_or_province_name': 'ST',\n 'locality_name': 'L',\n 'organization_name': 'O',\n 'organizational_unit_name': 'OU',\n 'common_name': 'CN',\n 'email_address': 'emailAddress'\n }\n\n return '/'.join(['{}={}'.format(types[attr], name.native[attr]) for attr in name.native])", "def GetDummyName(basename, suffix='', domain='', zero_based=True):\n return _C.GetDummyName(basename, suffix, domain, zero_based)", "def generate_name(host, subject_type_or_type_name):\n id = UniqueId.create_id()\n return '%s/%s.%d' % (host, get_name(subject_type_or_type_name), id)", "def convert_name(self, human_name):\n\n human_name = HumanName(human_name)\n if human_name.suffix:\n self.metadata[\"gutenberg_name_suffix\"] = human_name.suffix\n human_name.suffix = \"\"\n if human_name.nickname:\n # LOGGER.debug(\"%s nickname: %s\", str(human_name), human_name.nickname)\n no_nickname = copy.copy(human_name)\n no_nickname.nickname = \"\"\n first_name_match = re.match(\n re.sub(r\"(([A-Z])[a-z]*[.])\", r\"\\2\\\\w+\", human_name.first, re.UNICODE),\n human_name.nickname,\n re.UNICODE\n )\n # LOGGER.debug(\n # \"%s, %s\",\n # re.sub(\n # r\"(([A-Z])[a-z]*[.])\", r\"\\2\\\\w+\",\n # human_name.first,\n # re.UNICODE\n # ),\n # human_name.nickname\n # )\n if first_name_match and len(first_name_match.group(0)) >= len(human_name.first):\n human_name.first = first_name_match.group(0)\n human_name.nickname = 
human_name.nickname[len(human_name.first):].strip()\n # LOGGER.debug(\"Adding %s to aliases\", str(no_nickname))\n self.metadata[\"aliases\"] = set([str(no_nickname)])\n middle_name_match = re.match(\n re.sub(r\"(([A-Z])[a-z]*[.])\", r\"\\2\\\\w+\", human_name.middle, re.UNICODE),\n human_name.nickname,\n re.UNICODE\n )\n # LOGGER.debug(\n # \"%s, %s\",\n # re.sub(\n # r\"(([A-Z])[a-z]*[.])\", r\"\\2\\\\w+\",\n # human_name.middle, re.UNICODE\n # ),\n # human_name.nickname\n # )\n if middle_name_match and len(middle_name_match.group(0)) >= len(human_name.middle):\n human_name.middle = middle_name_match.group(0)\n human_name.nickname = human_name.nickname[len(human_name.middle):].strip()\n # LOGGER.debug(\"Adding %s to aliases\", str(no_nickname))\n self.metadata[\"aliases\"].add(str(no_nickname))\n return human_name", "def getSubject(self):\n\n return X501DN.from_POW(self.get_POW().getSubject())", "def getSubject(self):\n\n return X501DN.from_POW(self.get_POW().getSubject())", "def resolve_first_name(obj, _):\n return obj.first_name.decode()", "def test_copy_without_name_change(self):\n subject_copy = copy_subject(self.subject, self.DATA_MODEL, change_name=False)\n self.assertEqual(\"Subject\", subject_copy[\"name\"])", "def get_certificate_name(cert_data) -> str:\r\n if cert_data is None:\r\n return None\r\n\r\n cert = x509.load_pem_x509_certificate(cert_data, default_backend())\r\n for fields in cert.subject:\r\n current = str(fields.oid)\r\n if \"commonName\" in current:\r\n return fields.value", "def create_dns_name ( base_name, name ) :\n return create_r53_name( base_name, name) + '.mse-esp.com'", "def create_internal_dns_name ( base_name, name ) :\n name = name + '.internal'\n return create_dns_name( base_name, name )", "def _get_domain_from_certificate_name(self, cert_name):\n # Remove Let's Encrypt prefix\n cert_name = cert_name.lstrip('le-')\n\n # Remove trailing numbers if present (as last 10 characters)\n name_fragments = cert_name.split('-')\n if len(name_fragments) > 1 and name_fragments[-1].isdigit():\n name_fragments = name_fragments[:-1]\n return '-'.join(name_fragments)", "def get_name(name_or_entity):\n if isinstance(name_or_entity, (Subject, SubjectType)):\n return name_or_entity.name\n elif isinstance(name_or_entity, db.Model):\n return name_or_entity.key().name()\n else:\n return name_or_entity", "def get_from_subject(mesid, mailbox):\n res, data = mailbox.fetch(mesid, 'BODY.PEEK[HEADER.FIELDS (SUBJECT FROM)]')\n if res != 'OK':\n raise RuntimeError('error in fetch call for {}'.format(mesid))\n # Apparently default character set for IMAP is UTF7\n myheads = data[0][1].decode('utf-7')\n name = get_from(myheads)\n\n subject = findall(r'Subject:\\s+(.*)\\r\\n', myheads)[0] # Assume match\n return ' '.join((name, ':', subject))", "def subject_property_name(self):\n subject_property_name = 'subject'\n if 'participant' in self.schemas.keys():\n subject_property_name = 'participant'\n return subject_property_name", "def test_copy_name(self):\n subject_copy = copy_subject(self.subject, self.DATA_MODEL)\n self.assertEqual(\"Subject (copy)\", subject_copy[\"name\"])", "def _ensure_fqdn(self, name):\n if name[-1:] != \".\":\n return \"%s.\" % name\n else:\n return name", "def __init__(self, base):\n if isinstance(base, str):\n self._name = base\n else:\n raise TypeError(NAME_CREATE_ERROR)", "def get_domain_name(self, DomainName: str) -> Dict:\n pass", "def subject_member_create(context, values, session=None):\n memb_ref = models.SubjectMember()\n _subject_member_update(context, 
memb_ref, values, session=session)\n return _subject_member_format(memb_ref)", "def build_person(self, doc, entity):\n match = self.person_re.match(entity)\n if match and validations.validate_person_name(match.group(self.PERSON_NAME_GROUP)):\n name = match.group(self.PERSON_NAME_GROUP).strip()\n email = match.group(self.PERSON_EMAIL_GROUP)\n if (email is not None) and (len(email) != 0):\n return creationinfo.Person(name=name, email=email.strip())\n else:\n return creationinfo.Person(name=name, email=None)\n else:\n raise SPDXValueError('Failed to extract person name')", "def get_name(self):\n return self.normalize_name(self.name)", "def getSubject(self, record):\n base_subject = super(CustomSMTPHandler, self).getSubject(record)\n try:\n hostname = platform.node()\n # pylint: disable=broad-except\n except Exception:\n hostname = 'Unknown'\n\n return base_subject.format(hostname)", "def fqdn_identifier(fqdn):\n return messages.Identifier(\n typ=messages.IDENTIFIER_FQDN, value=fqdn)", "def parse_name(self, transcript: str) -> None:\n name_match = re.match(\n r\".*(?=Unofficial\\ UNDERGRADUATE\\ ACADEMIC\\ RECORD)\", transcript, RE_OPT\n )\n if not name_match:\n raise ValueError(\"Name not found\")\n self.name = name_match.group(0).strip()", "def make_name(self):\n first, last = \"\", \"\"\n\n def get_first(self):\n \"\"\" Generate a first name \"\"\"\n return \"%s%s\" % (\n weighted_choice([(\"\", 39), (\"We put our faith in \", 1)]),\n choice(self.first_chunks).title()\n )\n\n def get_last(self):\n \"\"\" Generate a last name \"\"\"\n return \"%s%s%s\" % (\n # As per the original list there's a 1/39 (not conting Bob)\n # chance for a 'Mc' prefix to the lastname\n #\n # Can also, with low propability be \"von <lastname>\"\n weighted_choice([(\"\", 35), (\"Mc\", 3), (\"von \", 1)]),\n choice(self.second_chunks).title(),\n choice(self.third_chunks))\n\n # Avoid the first name reappearing in the last name...\n while first.lower() in last.lower():\n first = get_first(self)\n last = get_last(self)\n\n # Always exclaimatory\n return \"%s %s!\" % (first, last)", "def strToFQN(self, x):\n try:\n obj = reflect.namedObject(x)\n fqn = reflect.fullyQualifiedName(obj)\n except:\n return\n return fqn", "def get_name(self, name, namespace=None):\n if namespace is None:\n namespace = self._default_namespace\n return Name(name=name, namespace=namespace)", "def name_create(self, name):\n values = {\n 'name': name,\n }\n return self.create(values).name_get()[0]", "def _set_subject_alt(self, name, values):\n\n if self._subject_alt_name is not None:\n filtered_general_names = []\n for general_name in self._subject_alt_name:\n if general_name.name != name:\n filtered_general_names.append(general_name)\n self._subject_alt_name = x509.GeneralNames(filtered_general_names)\n\n else:\n self._subject_alt_name = x509.GeneralNames()\n\n if values is not None:\n for value in values:\n new_general_name = x509.GeneralName(name=name, value=value)\n self._subject_alt_name.append(new_general_name)\n\n if len(self._subject_alt_name) == 0:\n self._subject_alt_name = None", "def subject(self) -> str:\n return self[\"Sns\"][\"Subject\"]", "def MAKE_NAME(name):\n name = name.replace('$', 'DOLLAR')\n name = name.replace('.', 'DOT')\n if name.startswith('__'):\n return '_X' + name\n elif name[0] in '01234567879':\n return '_' + name\n return name", "def _get_name(self, category, name, snake_case=True):\n if snake_case:\n name = xform_name(name)\n\n return self._renamed.get((category, name), name)", "def _get_domain_for_name(self, 
name):\n domain = self.connection.lookupByName(name)\n return domain", "def domain_dns_name(self):\n domain_dn = self.get_default_basedn()\n return domain_dn.canonical_str().split('/')[0]", "def subject(self) -> \"str\":\n return self._attrs.get(\"subject\")", "def subject(self) -> \"str\":\n return self._attrs.get(\"subject\")", "def _convert_name(self, name):\n if not self.re_name.match(name):\n org_name = name\n name = self.re_white.sub('_', name)\n name = self.re_alpha.sub('_', name)\n if not self.re_name.match(name):\n name = 'x_' + name2\n self.warn('Converting name <' + org_name + '> to <' + name + '>.')\n return name", "def subject(self):\n subject = re.sub(RE_PATTERNS, '', self.header('Subject', ''))\n subject = re.sub(FW_PATTERNS, '', subject)\n return subject.strip()", "def get_domain_name(self):\n return self.domain_name.get_text()", "def email_to_name(email):\n n = email.split(\"@\")[0].title()\n return n.replace(\".\", \" \")", "def subject(self, subject: \"str\"):\n self._attrs[\"subject\"] = subject", "def subject(self, subject: \"str\"):\n self._attrs[\"subject\"] = subject", "def get_obj_name(ra, dec, obj_naming_sys='sdss'):\n\tif obj_naming_sys == 'sdss':\n\t\treturn getSDSSName_fromRADEC(ra, dec)\n\telse: \n\t\traise ValueError(\"[objnaming] obj_naming_sys not recognized\")", "def _create_name(self) -> str:\n return self.stream.__class__.__name__", "def create_domain(self, domain_name):\r\n params = {'DomainName':domain_name}\r\n d = self.get_object('CreateDomain', params, Domain)\r\n d.name = domain_name\r\n return d", "def canonical_name(self, name):\n raise NotImplementedError", "def get_valid_subject_name(self, position):\n \"\"\"\n Arguments:\n position: position of the name in the list\n Returns:\n valid subject's name\n \"\"\"\n assert position < self.n_valid,\\\n \"The total number of validation samples is: %d\" % self.n_valid\n return self.validation_subjects[position]", "def generate(self):\n return Name(forename=choice(self.first_names, p=self.first_name_freq),\n surname=choice(self.last_names, p=self.last_name_freq))", "def create_subject(name: str, title: str, type_: str) -> SubjectID:\n subject = DbSubject(name, title, type_)\n\n db.session.add(subject)\n db.session.commit()\n\n return _db_entity_to_subject(subject)", "def create_domain(self, domain_name):\n params = {'DomainName': domain_name}\n d = self.get_object('CreateDomain', params, Domain)\n d.name = domain_name\n return d", "def get_name(self):\n return m2.x509_extension_get_name(self.x509_ext)", "def get_by_subject(subject_name):\n return filter_by_prefix(Subscription.all(), subject_name + ':')", "def get_name_from_email(email):\r\n individual_name = email.split('@')[0]\r\n parts = individual_name.split('.')\r\n name = \" \".join(parts).title()\r\n return name", "def create_name(length):\n if length <= 0:\n return None\n else:\n return create_random_name(length)", "def get_cert_DNSNames(cert):\n try:\n ext = cert.extensions.get_extension_for_oid(x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME)\n dns_names = ext.value.get_values_for_type(x509.DNSName)\n except Exception:\n raise exception.SysinvException(_(\n \"Failed to get certificate SAN's DNSNames.\"))\n return dns_names", "def name(self, cname: str)->str:\n return self.like(cname, mx=1)[0]['cname']", "def subject(self):\n return self.properties.get(\"subject\", None)", "def get_user_provided_subject_identifier_attrname(self):\n return None", "def objToFQN(self, x):\n try:\n fqn = reflect.fullyQualifiedName(x)\n reflect.namedObject(fqn)\n 
except:\n return\n return fqn", "def test_record_name(self):\n zone = Zone('test.example.com')\n record = Record(zone, 'test-record', {'type': 'A', 'ttl': 300})\n self.assertEqual(record.name, 'test-record')", "def extract_name(self, name):\n name_oid2code_map = {v: k for k, v in DN_CODE_TO_OID.items()}\n res = []\n for att in name:\n if att.oid not in name_oid2code_map:\n raise InvalidCertificate(\"Unsupported RDN: %s\" % (att,))\n desc = name_oid2code_map[att.oid]\n val = as_unicode(att.value)\n res.append((desc, val))\n return res", "def get_topicname ( base_name, object_type, condition ) :\n return base_name + '-' + object_type.upper( ) + '-' + condition.upper( )", "def __normalize_name(self):\n self.normalized_name = normalizeSimplified(self.name)", "def get_name() -> str:", "def DNSServiceConstructFullName(\n service = None,\n regtype = _NO_DEFAULT, \n domain = _NO_DEFAULT,\n ):\n\n _NO_DEFAULT.check(regtype)\n _NO_DEFAULT.check(domain)\n\n _global_lock.acquire()\n try:\n fullName = _DNSServiceConstructFullName(service, regtype, domain)\n finally:\n _global_lock.release()\n\n return fullName.value.decode('utf-8')", "def name(self):\n return utils.force_name_case(self._name)", "def __init__(self, author_and_email_tuple):\n name, email = author_and_email_tuple\n first_last = name.strip()\n if first_last in names_equivalences:\n first_last = names_equivalences[first_last]\n self.first_last = first_last\n self.email = email.lower()\n last_name_fragment, suffix = self._last_name(name)\n name_sep = name.index(last_name_fragment)\n self.first = name[:name_sep].rstrip()\n self.last = last_name_fragment\n self.suffix = suffix\n if not self.first:\n self.last_first = self.last\n else:\n self.last_first = u', '.join([self.last, self.first])\n if self.suffix:\n self.last_first += u', ' + self.suffix\n if self.last == \"van Rossum\":\n # Special case for our beloved BDFL. 
:)\n if self.first == \"Guido\":\n self.nick = \"GvR\"\n elif self.first == \"Just\":\n self.nick = \"JvR\"\n else:\n raise ValueError(\"unkown van Rossum %r!\" % self)\n self.last_first += \" (%s)\" % (self.nick,)\n else:\n self.nick = self.last", "def ValidateName(args):\n account = properties.VALUES.core.account.Get(required=True)\n if account.find('@') == -1:\n username = account\n else:\n username = account[0:account.find('@')]\n\n args.name = args.name or username", "def create_Name(var_name, right_hand_side=True, line=0, column=0):\n name = ast.Name()\n name.id = var_name\n name.lineno = line\n name.col_offset = column\n\n if right_hand_side:\n name.ctx = ast.Load()\n else:\n name.ctx = ast.Store()\n\n return name", "def folder_name(subjfolder, newfolder=None, idregex=None):\n log.info('anonymizer.py folder_name {0} {1} {2}'.format(subjfolder.encode('utf-8'),\n newfolder,\n idregex))\n\n subjfolder = get_abspath(subjfolder)\n\n if newfolder is not None:\n log.info('Moving: ' + subjfolder + ' -> ' + newfolder)\n path(subjfolder).move(newfolder)\n\n return newfolder\n\n #newfolder is None, let's find other name\n #get name of the base folder\n basedir = subjfolder.dirname()\n if subjfolder[-1] == os.sep:\n basedir = basedir.dirname()\n\n if idregex is not None:\n subjid = re.search(idregex, subjfolder.basename()).group(0)\n if len(subjid) > 3:\n newfolder = subjid\n else:\n log.error('Could not find \"{0}\" on folder name {1}.'.format(idregex,\n basedir))\n\n #try to guess new folder name from DICOM headers\n if newfolder is None:\n log.info('Reading internal DICOMs PatientID to get new folder name.')\n newfolder = get_patient_mri_id(subjfolder)\n\n if newfolder is None:\n log.error('Could not find a folder name for {0} from DICOMs.'.format(subjfolder))\n return subjfolder\n\n #else:\n #move the subjfolder to the new folder if it has a different name\n newfolder = os.path.join(basedir, newfolder)\n if subjfolder != newfolder:\n log.info('Moving: ' + subjfolder + ' -> ' + newfolder)\n path(subjfolder).move(newfolder)\n\n return newfolder", "def _username_from_name(self, name):\r\n return name.replace(' ', '_')", "def create_uuid3(namespace, name):\n return uuid.uuid3(namespace, six.ensure_str(name))", "def __init__(self, name):\n self.name = name\n self.birthday = None\n self.lastName = name.split(' ')[-1]", "def __init__(self, name):\n self.name = name\n self.birthday = None\n self.lastName = name.split(' ')[-1]", "def __init__(self, name):\n self.name = name\n self.birthday = None\n self.lastName = name.split(' ')[-1]", "def test_name(self):\n node = self.create(ObjectNodeItem, UML.ObjectNode)\n name = node.shape.icon.children[1]\n\n node.subject.name = \"Blah\"\n\n assert \"Blah\" == name.text()", "def _get_subject_alt(self, name):\n\n if self._subject_alt_name is None:\n return []\n\n output = []\n for general_name in self._subject_alt_name:\n if general_name.name == name:\n output.append(general_name.native)\n return output", "def __init__(self, name):\n self.name = name\n self.birthday = None\n self.lastName = name.split(\" \")[-1]", "def to_name(self, to_name):\n self._to_name = to_name", "def make_systematic_name(name):\n return \" \".join(re.findall(r\"([A-Z]+[a-z]*)\", name)).capitalize()", "def create_random_name(length):\n name = (''.join(random.choices(string.ascii_lowercase, k=length)))\n name_capitalized = name.capitalize()\n return name_capitalized", "def __init__(self, name, namespace=None):\n # Normalize: namespace is always defined as a string, possibly empty.\n if 
namespace is None:\n namespace = ''\n\n if '.' in name:\n # name is absolute, namespace is ignored:\n self._fullname = name\n\n match = _RE_FULL_NAME.match(self._fullname)\n if match is None:\n raise SchemaParseException(\n 'Invalid absolute schema name: %r.' % self._fullname)\n\n self._name = match.group(1)\n self._namespace = self._fullname[:-(len(self._name) + 1)]\n\n else:\n # name is relative, combine with explicit namespace:\n self._name = name\n self._namespace = namespace\n self._fullname = (self._name\n if (not self._namespace) else\n '%s.%s' % (self._namespace, self._name))\n\n # Validate the fullname:\n if _RE_FULL_NAME.match(self._fullname) is None:\n raise SchemaParseException(\n 'Invalid schema name %r inferred from name %r and namespace %r.'\n % (self._fullname, self._name, self._namespace))", "def resolve_last_name(obj, _):\n return obj.last_name.decode()", "def encode_san_dns_names(self, san):\n dns_names = []\n for dns_name in san:\n dns_names.append(x509.DNSName(dns_name))\n return dns_names", "def instantiate_subject(cls, *args, **kwargs) -> Any:\n return cls.subject_type()(*args, **kwargs)", "def sanitize_name(self):\n self._name = self.get_name().strip()", "def get_pretty_subject(cert):\n subject = 'subject=' + _get_pretty_name(cert.get_subject())\n issuer = 'issuer=' + _get_pretty_name(cert.get_issuer())\n return subject + '\\n' + issuer + '\\n'", "def __create_classname(self, fullname):\n return PACKAGE_NAME + \".\" + fullname", "def _FormalizeName(cls, name):\n name = name.replace(\"_\", \"-\").lower()\n name = name[:cls.NAME_LENGTH_LIMIT]\n if name[-1] == \"-\":\n name = name[:-1] + cls.REPLACER\n return name" ]
[ "0.6041832", "0.57824373", "0.5740667", "0.56460947", "0.5608916", "0.5546563", "0.5349921", "0.5217777", "0.51690304", "0.51497644", "0.51314163", "0.5014414", "0.4994642", "0.49935788", "0.49472374", "0.49472374", "0.48983237", "0.4867557", "0.48653823", "0.4858505", "0.48528245", "0.48191625", "0.48063064", "0.47903714", "0.4788402", "0.47736162", "0.4744955", "0.47371292", "0.47333068", "0.4727912", "0.47111222", "0.47000962", "0.46984062", "0.46947113", "0.46863872", "0.46731338", "0.4670196", "0.4641892", "0.46353593", "0.46331793", "0.462966", "0.46215382", "0.46147043", "0.46013165", "0.46002537", "0.45915157", "0.45915157", "0.45626378", "0.45620573", "0.4557514", "0.45502385", "0.45443016", "0.45443016", "0.4538629", "0.45293078", "0.4525454", "0.4504761", "0.4500806", "0.44794962", "0.4470758", "0.44519806", "0.44445053", "0.44332713", "0.4432678", "0.44243774", "0.44214442", "0.44177902", "0.4416161", "0.4414926", "0.4411586", "0.4396869", "0.43942857", "0.43942648", "0.43940255", "0.43931678", "0.43901166", "0.4389533", "0.43809396", "0.4379958", "0.4377008", "0.43769652", "0.4365799", "0.43547782", "0.43514293", "0.43514293", "0.43514293", "0.43510365", "0.43504024", "0.43499577", "0.434891", "0.434748", "0.4342062", "0.4340495", "0.4336707", "0.43343532", "0.43288583", "0.43266657", "0.43253386", "0.43227938", "0.43198204" ]
0.45507523
50
Create Name object from subject's DistinguishedName.
def get_name(self): return self.load_name(self.subject)
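Editorial aside — get_name above simply feeds the object's stored subject into the load_name method from the previous record. A sketch under the assumption that self.subject holds the same (code, value) pairs load_name expects:

# tool: hypothetical instance of the class defining get_name/load_name
tool.subject = [("CN", "example.com"), ("O", "Example Org")]
name = tool.get_name()        # equivalent to tool.load_name(tool.subject)
print(name.rfc4514_string())  # -> 'O=Example Org,CN=example.com'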
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create(subdomain, subject_type_or_type_name, subject_name, author):\n return Subject(key_name='%s:%s' % (subdomain, subject_name),\n type=get_name(subject_type_or_type_name), author=author)", "def create_subject(name=\"Basket Weaving\"):\n subj = Subject(name=name)\n subj.save()\n return subj", "def create_domain_name(self, name):\n return (\"%s.%s.%s\" % (name, \"net\", self.domain)).lower()", "def get_subject(self):\n ri = self.get_request_info()\n if ri['subject'] is None:\n ri['subject'] = None\n # setup first RDN sequence\n ri['subject'][0] = None\n\n subject = ri['subject'][0]\n return name.X509Name(subject)", "def generate_x509_name(self, cn):\n name_attributes = [\n x509.NameAttribute(NameOID.COMMON_NAME, cn),\n ]\n if self.settings['csr_country_name']:\n name_attributes.append(\n x509.NameAttribute(\n NameOID.COUNTRY_NAME,\n self.settings['csr_country_name'],\n )\n )\n if self.settings['csr_state_or_province_name']:\n name_attributes.append(\n x509.NameAttribute(\n NameOID.STATE_OR_PROVINCE_NAME,\n self.settings['csr_state_or_province_name'],\n )\n )\n if self.settings['csr_locality_name']:\n name_attributes.append(\n x509.NameAttribute(\n NameOID.LOCALITY_NAME,\n self.settings['csr_locality_name'],\n )\n )\n if self.settings['csr_organization_name']:\n name_attributes.append(\n x509.NameAttribute(\n NameOID.ORGANIZATION_NAME,\n self.settings['csr_organization_name'],\n )\n )\n return x509.Name(name_attributes)", "def subject(self, value):\n\n is_dict = isinstance(value, dict)\n if not isinstance(value, x509.Name) and not is_dict:\n raise TypeError(_pretty_message(\n '''\n subject must be an instance of asn1crypto.x509.Name or a dict,\n not %s\n ''',\n _type_name(value)\n ))\n\n if is_dict:\n value = x509.Name.build(value)\n\n self._subject = value", "def create_name (self):\n return self.create_topic().create_name('Name')", "def subject_name(self, subject_name):\r\n\r\n self._subject_name = subject_name", "def resolve_name(obj, _):\n return obj.name.decode()", "def convert_x509_name(name):\n types = {\n 'country_name': 'C',\n 'state_or_province_name': 'ST',\n 'locality_name': 'L',\n 'organization_name': 'O',\n 'organizational_unit_name': 'OU',\n 'common_name': 'CN',\n 'email_address': 'emailAddress'\n }\n\n return '/'.join(['{}={}'.format(types[attr], name.native[attr]) for attr in name.native])", "def GetDummyName(basename, suffix='', domain='', zero_based=True):\n return _C.GetDummyName(basename, suffix, domain, zero_based)", "def generate_name(host, subject_type_or_type_name):\n id = UniqueId.create_id()\n return '%s/%s.%d' % (host, get_name(subject_type_or_type_name), id)", "def convert_name(self, human_name):\n\n human_name = HumanName(human_name)\n if human_name.suffix:\n self.metadata[\"gutenberg_name_suffix\"] = human_name.suffix\n human_name.suffix = \"\"\n if human_name.nickname:\n # LOGGER.debug(\"%s nickname: %s\", str(human_name), human_name.nickname)\n no_nickname = copy.copy(human_name)\n no_nickname.nickname = \"\"\n first_name_match = re.match(\n re.sub(r\"(([A-Z])[a-z]*[.])\", r\"\\2\\\\w+\", human_name.first, re.UNICODE),\n human_name.nickname,\n re.UNICODE\n )\n # LOGGER.debug(\n # \"%s, %s\",\n # re.sub(\n # r\"(([A-Z])[a-z]*[.])\", r\"\\2\\\\w+\",\n # human_name.first,\n # re.UNICODE\n # ),\n # human_name.nickname\n # )\n if first_name_match and len(first_name_match.group(0)) >= len(human_name.first):\n human_name.first = first_name_match.group(0)\n human_name.nickname = human_name.nickname[len(human_name.first):].strip()\n # LOGGER.debug(\"Adding %s 
to aliases\", str(no_nickname))\n self.metadata[\"aliases\"] = set([str(no_nickname)])\n middle_name_match = re.match(\n re.sub(r\"(([A-Z])[a-z]*[.])\", r\"\\2\\\\w+\", human_name.middle, re.UNICODE),\n human_name.nickname,\n re.UNICODE\n )\n # LOGGER.debug(\n # \"%s, %s\",\n # re.sub(\n # r\"(([A-Z])[a-z]*[.])\", r\"\\2\\\\w+\",\n # human_name.middle, re.UNICODE\n # ),\n # human_name.nickname\n # )\n if middle_name_match and len(middle_name_match.group(0)) >= len(human_name.middle):\n human_name.middle = middle_name_match.group(0)\n human_name.nickname = human_name.nickname[len(human_name.middle):].strip()\n # LOGGER.debug(\"Adding %s to aliases\", str(no_nickname))\n self.metadata[\"aliases\"].add(str(no_nickname))\n return human_name", "def getSubject(self):\n\n return X501DN.from_POW(self.get_POW().getSubject())", "def getSubject(self):\n\n return X501DN.from_POW(self.get_POW().getSubject())", "def resolve_first_name(obj, _):\n return obj.first_name.decode()", "def test_copy_without_name_change(self):\n subject_copy = copy_subject(self.subject, self.DATA_MODEL, change_name=False)\n self.assertEqual(\"Subject\", subject_copy[\"name\"])", "def get_certificate_name(cert_data) -> str:\r\n if cert_data is None:\r\n return None\r\n\r\n cert = x509.load_pem_x509_certificate(cert_data, default_backend())\r\n for fields in cert.subject:\r\n current = str(fields.oid)\r\n if \"commonName\" in current:\r\n return fields.value", "def create_dns_name ( base_name, name ) :\n return create_r53_name( base_name, name) + '.mse-esp.com'", "def create_internal_dns_name ( base_name, name ) :\n name = name + '.internal'\n return create_dns_name( base_name, name )", "def _get_domain_from_certificate_name(self, cert_name):\n # Remove Let's Encrypt prefix\n cert_name = cert_name.lstrip('le-')\n\n # Remove trailing numbers if present (as last 10 characters)\n name_fragments = cert_name.split('-')\n if len(name_fragments) > 1 and name_fragments[-1].isdigit():\n name_fragments = name_fragments[:-1]\n return '-'.join(name_fragments)", "def get_name(name_or_entity):\n if isinstance(name_or_entity, (Subject, SubjectType)):\n return name_or_entity.name\n elif isinstance(name_or_entity, db.Model):\n return name_or_entity.key().name()\n else:\n return name_or_entity", "def get_from_subject(mesid, mailbox):\n res, data = mailbox.fetch(mesid, 'BODY.PEEK[HEADER.FIELDS (SUBJECT FROM)]')\n if res != 'OK':\n raise RuntimeError('error in fetch call for {}'.format(mesid))\n # Apparently default character set for IMAP is UTF7\n myheads = data[0][1].decode('utf-7')\n name = get_from(myheads)\n\n subject = findall(r'Subject:\\s+(.*)\\r\\n', myheads)[0] # Assume match\n return ' '.join((name, ':', subject))", "def subject_property_name(self):\n subject_property_name = 'subject'\n if 'participant' in self.schemas.keys():\n subject_property_name = 'participant'\n return subject_property_name", "def test_copy_name(self):\n subject_copy = copy_subject(self.subject, self.DATA_MODEL)\n self.assertEqual(\"Subject (copy)\", subject_copy[\"name\"])", "def _ensure_fqdn(self, name):\n if name[-1:] != \".\":\n return \"%s.\" % name\n else:\n return name", "def __init__(self, base):\n if isinstance(base, str):\n self._name = base\n else:\n raise TypeError(NAME_CREATE_ERROR)", "def get_domain_name(self, DomainName: str) -> Dict:\n pass", "def subject_member_create(context, values, session=None):\n memb_ref = models.SubjectMember()\n _subject_member_update(context, memb_ref, values, session=session)\n return _subject_member_format(memb_ref)", 
"def build_person(self, doc, entity):\n match = self.person_re.match(entity)\n if match and validations.validate_person_name(match.group(self.PERSON_NAME_GROUP)):\n name = match.group(self.PERSON_NAME_GROUP).strip()\n email = match.group(self.PERSON_EMAIL_GROUP)\n if (email is not None) and (len(email) != 0):\n return creationinfo.Person(name=name, email=email.strip())\n else:\n return creationinfo.Person(name=name, email=None)\n else:\n raise SPDXValueError('Failed to extract person name')", "def getSubject(self, record):\n base_subject = super(CustomSMTPHandler, self).getSubject(record)\n try:\n hostname = platform.node()\n # pylint: disable=broad-except\n except Exception:\n hostname = 'Unknown'\n\n return base_subject.format(hostname)", "def get_name(self):\n return self.normalize_name(self.name)", "def fqdn_identifier(fqdn):\n return messages.Identifier(\n typ=messages.IDENTIFIER_FQDN, value=fqdn)", "def parse_name(self, transcript: str) -> None:\n name_match = re.match(\n r\".*(?=Unofficial\\ UNDERGRADUATE\\ ACADEMIC\\ RECORD)\", transcript, RE_OPT\n )\n if not name_match:\n raise ValueError(\"Name not found\")\n self.name = name_match.group(0).strip()", "def make_name(self):\n first, last = \"\", \"\"\n\n def get_first(self):\n \"\"\" Generate a first name \"\"\"\n return \"%s%s\" % (\n weighted_choice([(\"\", 39), (\"We put our faith in \", 1)]),\n choice(self.first_chunks).title()\n )\n\n def get_last(self):\n \"\"\" Generate a last name \"\"\"\n return \"%s%s%s\" % (\n # As per the original list there's a 1/39 (not conting Bob)\n # chance for a 'Mc' prefix to the lastname\n #\n # Can also, with low propability be \"von <lastname>\"\n weighted_choice([(\"\", 35), (\"Mc\", 3), (\"von \", 1)]),\n choice(self.second_chunks).title(),\n choice(self.third_chunks))\n\n # Avoid the first name reappearing in the last name...\n while first.lower() in last.lower():\n first = get_first(self)\n last = get_last(self)\n\n # Always exclaimatory\n return \"%s %s!\" % (first, last)", "def strToFQN(self, x):\n try:\n obj = reflect.namedObject(x)\n fqn = reflect.fullyQualifiedName(obj)\n except:\n return\n return fqn", "def get_name(self, name, namespace=None):\n if namespace is None:\n namespace = self._default_namespace\n return Name(name=name, namespace=namespace)", "def name_create(self, name):\n values = {\n 'name': name,\n }\n return self.create(values).name_get()[0]", "def _set_subject_alt(self, name, values):\n\n if self._subject_alt_name is not None:\n filtered_general_names = []\n for general_name in self._subject_alt_name:\n if general_name.name != name:\n filtered_general_names.append(general_name)\n self._subject_alt_name = x509.GeneralNames(filtered_general_names)\n\n else:\n self._subject_alt_name = x509.GeneralNames()\n\n if values is not None:\n for value in values:\n new_general_name = x509.GeneralName(name=name, value=value)\n self._subject_alt_name.append(new_general_name)\n\n if len(self._subject_alt_name) == 0:\n self._subject_alt_name = None", "def subject(self) -> str:\n return self[\"Sns\"][\"Subject\"]", "def MAKE_NAME(name):\n name = name.replace('$', 'DOLLAR')\n name = name.replace('.', 'DOT')\n if name.startswith('__'):\n return '_X' + name\n elif name[0] in '01234567879':\n return '_' + name\n return name", "def _get_name(self, category, name, snake_case=True):\n if snake_case:\n name = xform_name(name)\n\n return self._renamed.get((category, name), name)", "def _get_domain_for_name(self, name):\n domain = self.connection.lookupByName(name)\n return domain", "def 
domain_dns_name(self):\n domain_dn = self.get_default_basedn()\n return domain_dn.canonical_str().split('/')[0]", "def subject(self) -> \"str\":\n return self._attrs.get(\"subject\")", "def subject(self) -> \"str\":\n return self._attrs.get(\"subject\")", "def subject(self):\n subject = re.sub(RE_PATTERNS, '', self.header('Subject', ''))\n subject = re.sub(FW_PATTERNS, '', subject)\n return subject.strip()", "def _convert_name(self, name):\n if not self.re_name.match(name):\n org_name = name\n name = self.re_white.sub('_', name)\n name = self.re_alpha.sub('_', name)\n if not self.re_name.match(name):\n name = 'x_' + name2\n self.warn('Converting name <' + org_name + '> to <' + name + '>.')\n return name", "def get_domain_name(self):\n return self.domain_name.get_text()", "def email_to_name(email):\n n = email.split(\"@\")[0].title()\n return n.replace(\".\", \" \")", "def load_name(self, name_att_list):\n attlist = []\n got = set()\n for k, v in name_att_list:\n if k in got and k not in DN_ALLOW_MULTIPLE:\n raise InvalidCertificate(\"Multiple Name keys not allowed: %s\" % (k,))\n oid = DN_CODE_TO_OID[k]\n n = x509.NameAttribute(oid, as_unicode(v))\n attlist.append(n)\n return x509.Name(attlist)", "def subject(self, subject: \"str\"):\n self._attrs[\"subject\"] = subject", "def subject(self, subject: \"str\"):\n self._attrs[\"subject\"] = subject", "def get_obj_name(ra, dec, obj_naming_sys='sdss'):\n\tif obj_naming_sys == 'sdss':\n\t\treturn getSDSSName_fromRADEC(ra, dec)\n\telse: \n\t\traise ValueError(\"[objnaming] obj_naming_sys not recognized\")", "def _create_name(self) -> str:\n return self.stream.__class__.__name__", "def create_domain(self, domain_name):\r\n params = {'DomainName':domain_name}\r\n d = self.get_object('CreateDomain', params, Domain)\r\n d.name = domain_name\r\n return d", "def canonical_name(self, name):\n raise NotImplementedError", "def get_valid_subject_name(self, position):\n \"\"\"\n Arguments:\n position: position of the name in the list\n Returns:\n valid subject's name\n \"\"\"\n assert position < self.n_valid,\\\n \"The total number of validation samples is: %d\" % self.n_valid\n return self.validation_subjects[position]", "def generate(self):\n return Name(forename=choice(self.first_names, p=self.first_name_freq),\n surname=choice(self.last_names, p=self.last_name_freq))", "def create_subject(name: str, title: str, type_: str) -> SubjectID:\n subject = DbSubject(name, title, type_)\n\n db.session.add(subject)\n db.session.commit()\n\n return _db_entity_to_subject(subject)", "def create_domain(self, domain_name):\n params = {'DomainName': domain_name}\n d = self.get_object('CreateDomain', params, Domain)\n d.name = domain_name\n return d", "def get_name(self):\n return m2.x509_extension_get_name(self.x509_ext)", "def get_by_subject(subject_name):\n return filter_by_prefix(Subscription.all(), subject_name + ':')", "def get_name_from_email(email):\r\n individual_name = email.split('@')[0]\r\n parts = individual_name.split('.')\r\n name = \" \".join(parts).title()\r\n return name", "def create_name(length):\n if length <= 0:\n return None\n else:\n return create_random_name(length)", "def get_cert_DNSNames(cert):\n try:\n ext = cert.extensions.get_extension_for_oid(x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME)\n dns_names = ext.value.get_values_for_type(x509.DNSName)\n except Exception:\n raise exception.SysinvException(_(\n \"Failed to get certificate SAN's DNSNames.\"))\n return dns_names", "def name(self, cname: str)->str:\n return self.like(cname, 
mx=1)[0]['cname']", "def subject(self):\n return self.properties.get(\"subject\", None)", "def get_user_provided_subject_identifier_attrname(self):\n return None", "def objToFQN(self, x):\n try:\n fqn = reflect.fullyQualifiedName(x)\n reflect.namedObject(fqn)\n except:\n return\n return fqn", "def test_record_name(self):\n zone = Zone('test.example.com')\n record = Record(zone, 'test-record', {'type': 'A', 'ttl': 300})\n self.assertEqual(record.name, 'test-record')", "def extract_name(self, name):\n name_oid2code_map = {v: k for k, v in DN_CODE_TO_OID.items()}\n res = []\n for att in name:\n if att.oid not in name_oid2code_map:\n raise InvalidCertificate(\"Unsupported RDN: %s\" % (att,))\n desc = name_oid2code_map[att.oid]\n val = as_unicode(att.value)\n res.append((desc, val))\n return res", "def get_topicname ( base_name, object_type, condition ) :\n return base_name + '-' + object_type.upper( ) + '-' + condition.upper( )", "def __normalize_name(self):\n self.normalized_name = normalizeSimplified(self.name)", "def get_name() -> str:", "def DNSServiceConstructFullName(\n service = None,\n regtype = _NO_DEFAULT, \n domain = _NO_DEFAULT,\n ):\n\n _NO_DEFAULT.check(regtype)\n _NO_DEFAULT.check(domain)\n\n _global_lock.acquire()\n try:\n fullName = _DNSServiceConstructFullName(service, regtype, domain)\n finally:\n _global_lock.release()\n\n return fullName.value.decode('utf-8')", "def name(self):\n return utils.force_name_case(self._name)", "def __init__(self, author_and_email_tuple):\n name, email = author_and_email_tuple\n first_last = name.strip()\n if first_last in names_equivalences:\n first_last = names_equivalences[first_last]\n self.first_last = first_last\n self.email = email.lower()\n last_name_fragment, suffix = self._last_name(name)\n name_sep = name.index(last_name_fragment)\n self.first = name[:name_sep].rstrip()\n self.last = last_name_fragment\n self.suffix = suffix\n if not self.first:\n self.last_first = self.last\n else:\n self.last_first = u', '.join([self.last, self.first])\n if self.suffix:\n self.last_first += u', ' + self.suffix\n if self.last == \"van Rossum\":\n # Special case for our beloved BDFL. 
:)\n if self.first == \"Guido\":\n self.nick = \"GvR\"\n elif self.first == \"Just\":\n self.nick = \"JvR\"\n else:\n raise ValueError(\"unkown van Rossum %r!\" % self)\n self.last_first += \" (%s)\" % (self.nick,)\n else:\n self.nick = self.last", "def ValidateName(args):\n account = properties.VALUES.core.account.Get(required=True)\n if account.find('@') == -1:\n username = account\n else:\n username = account[0:account.find('@')]\n\n args.name = args.name or username", "def create_Name(var_name, right_hand_side=True, line=0, column=0):\n name = ast.Name()\n name.id = var_name\n name.lineno = line\n name.col_offset = column\n\n if right_hand_side:\n name.ctx = ast.Load()\n else:\n name.ctx = ast.Store()\n\n return name", "def folder_name(subjfolder, newfolder=None, idregex=None):\n log.info('anonymizer.py folder_name {0} {1} {2}'.format(subjfolder.encode('utf-8'),\n newfolder,\n idregex))\n\n subjfolder = get_abspath(subjfolder)\n\n if newfolder is not None:\n log.info('Moving: ' + subjfolder + ' -> ' + newfolder)\n path(subjfolder).move(newfolder)\n\n return newfolder\n\n #newfolder is None, let's find other name\n #get name of the base folder\n basedir = subjfolder.dirname()\n if subjfolder[-1] == os.sep:\n basedir = basedir.dirname()\n\n if idregex is not None:\n subjid = re.search(idregex, subjfolder.basename()).group(0)\n if len(subjid) > 3:\n newfolder = subjid\n else:\n log.error('Could not find \"{0}\" on folder name {1}.'.format(idregex,\n basedir))\n\n #try to guess new folder name from DICOM headers\n if newfolder is None:\n log.info('Reading internal DICOMs PatientID to get new folder name.')\n newfolder = get_patient_mri_id(subjfolder)\n\n if newfolder is None:\n log.error('Could not find a folder name for {0} from DICOMs.'.format(subjfolder))\n return subjfolder\n\n #else:\n #move the subjfolder to the new folder if it has a different name\n newfolder = os.path.join(basedir, newfolder)\n if subjfolder != newfolder:\n log.info('Moving: ' + subjfolder + ' -> ' + newfolder)\n path(subjfolder).move(newfolder)\n\n return newfolder", "def _username_from_name(self, name):\r\n return name.replace(' ', '_')", "def create_uuid3(namespace, name):\n return uuid.uuid3(namespace, six.ensure_str(name))", "def __init__(self, name):\n self.name = name\n self.birthday = None\n self.lastName = name.split(' ')[-1]", "def __init__(self, name):\n self.name = name\n self.birthday = None\n self.lastName = name.split(' ')[-1]", "def __init__(self, name):\n self.name = name\n self.birthday = None\n self.lastName = name.split(' ')[-1]", "def test_name(self):\n node = self.create(ObjectNodeItem, UML.ObjectNode)\n name = node.shape.icon.children[1]\n\n node.subject.name = \"Blah\"\n\n assert \"Blah\" == name.text()", "def _get_subject_alt(self, name):\n\n if self._subject_alt_name is None:\n return []\n\n output = []\n for general_name in self._subject_alt_name:\n if general_name.name == name:\n output.append(general_name.native)\n return output", "def __init__(self, name):\n self.name = name\n self.birthday = None\n self.lastName = name.split(\" \")[-1]", "def to_name(self, to_name):\n self._to_name = to_name", "def make_systematic_name(name):\n return \" \".join(re.findall(r\"([A-Z]+[a-z]*)\", name)).capitalize()", "def create_random_name(length):\n name = (''.join(random.choices(string.ascii_lowercase, k=length)))\n name_capitalized = name.capitalize()\n return name_capitalized", "def __init__(self, name, namespace=None):\n # Normalize: namespace is always defined as a string, possibly empty.\n if 
namespace is None:\n namespace = ''\n\n if '.' in name:\n # name is absolute, namespace is ignored:\n self._fullname = name\n\n match = _RE_FULL_NAME.match(self._fullname)\n if match is None:\n raise SchemaParseException(\n 'Invalid absolute schema name: %r.' % self._fullname)\n\n self._name = match.group(1)\n self._namespace = self._fullname[:-(len(self._name) + 1)]\n\n else:\n # name is relative, combine with explicit namespace:\n self._name = name\n self._namespace = namespace\n self._fullname = (self._name\n if (not self._namespace) else\n '%s.%s' % (self._namespace, self._name))\n\n # Validate the fullname:\n if _RE_FULL_NAME.match(self._fullname) is None:\n raise SchemaParseException(\n 'Invalid schema name %r inferred from name %r and namespace %r.'\n % (self._fullname, self._name, self._namespace))", "def resolve_last_name(obj, _):\n return obj.last_name.decode()", "def encode_san_dns_names(self, san):\n dns_names = []\n for dns_name in san:\n dns_names.append(x509.DNSName(dns_name))\n return dns_names", "def instantiate_subject(cls, *args, **kwargs) -> Any:\n return cls.subject_type()(*args, **kwargs)", "def get_pretty_subject(cert):\n subject = 'subject=' + _get_pretty_name(cert.get_subject())\n issuer = 'issuer=' + _get_pretty_name(cert.get_issuer())\n return subject + '\\n' + issuer + '\\n'", "def sanitize_name(self):\n self._name = self.get_name().strip()", "def __create_classname(self, fullname):\n return PACKAGE_NAME + \".\" + fullname", "def _FormalizeName(cls, name):\n name = name.replace(\"_\", \"-\").lower()\n name = name[:cls.NAME_LENGTH_LIMIT]\n if name[-1] == \"-\":\n name = name[:-1] + cls.REPLACER\n return name" ]
[ "0.6042233", "0.57407993", "0.5645988", "0.56094193", "0.55473876", "0.53495145", "0.521684", "0.51685303", "0.51498526", "0.5132291", "0.5014182", "0.49951622", "0.49933416", "0.4948473", "0.4948473", "0.48978388", "0.4867619", "0.48654392", "0.48593077", "0.48541546", "0.4819609", "0.48047835", "0.47905287", "0.47883072", "0.47735703", "0.4744937", "0.47377914", "0.47320476", "0.4728312", "0.4710261", "0.46989396", "0.46988952", "0.46960685", "0.46846533", "0.46734434", "0.46703312", "0.46402106", "0.4634201", "0.46331865", "0.4630054", "0.46223575", "0.46138206", "0.4600552", "0.46002465", "0.4591917", "0.4591917", "0.45629174", "0.4562638", "0.45567894", "0.45494157", "0.45492262", "0.45443645", "0.45443645", "0.45384842", "0.4529353", "0.45252582", "0.45051047", "0.45006314", "0.448001", "0.44714934", "0.44517696", "0.44443285", "0.44325003", "0.4431237", "0.44256234", "0.44218612", "0.44181958", "0.44165498", "0.44149503", "0.44116497", "0.43973184", "0.43950117", "0.43946263", "0.43934634", "0.43930212", "0.4390896", "0.43890396", "0.4380505", "0.43777898", "0.4377458", "0.43773085", "0.43644643", "0.43556663", "0.4351315", "0.4351315", "0.4351315", "0.43505684", "0.43500584", "0.43498272", "0.4348768", "0.43477535", "0.43433413", "0.433955", "0.433667", "0.43351498", "0.43292418", "0.432647", "0.4325285", "0.43237028", "0.4320318" ]
0.57814914
1
Converts list of prefixed strings to GeneralName list.
def load_gnames(self, gname_list):
    gnames = []
    for alt in gname_list:
        if ':' not in alt:
            raise InvalidCertificate("Invalid gname: %s" % (alt,))
        t, val = alt.split(':', 1)
        t = t.lower().strip()
        val = val.strip()
        if t == 'dn':
            gn = x509.DirectoryName(self.load_name(parse_dn(val)))
        elif t == 'dns':
            gn = x509.DNSName(val)
        elif t == 'email':
            gn = x509.RFC822Name(val)
        elif t == 'uri':
            gn = x509.UniformResourceIdentifier(val)
        elif t == 'ip':
            if val.find(':') >= 0:
                gn = x509.IPAddress(ipaddress.IPv6Address(val))
            else:
                gn = x509.IPAddress(ipaddress.IPv4Address(val))
        elif t == 'net':
            if val.find(':') >= 0:
                gn = x509.IPAddress(ipaddress.IPv6Network(val))
            else:
                gn = x509.IPAddress(ipaddress.IPv4Network(val))
        else:
            raise InvalidCertificate('Invalid GeneralName: ' + alt)
        gnames.append(gn)
    return gnames
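As a standalone illustration of the prefix convention above, here is a minimal sketch that maps 'type:value' strings onto the cryptography library's x509 GeneralName types. The dn branch is omitted because it depends on the class's load_name and parse_dn helpers, and gnames_from_strings is a hypothetical name for this sketch, not part of the original API.

import ipaddress
from cryptography import x509

def gnames_from_strings(gname_list):
    # Map 'type:value' strings (dns:, email:, uri:, ip:, net:) to
    # cryptography x509 GeneralName objects.
    gnames = []
    for alt in gname_list:
        t, _, val = alt.partition(':')
        t, val = t.lower().strip(), val.strip()
        if t == 'dns':
            gnames.append(x509.DNSName(val))
        elif t == 'email':
            gnames.append(x509.RFC822Name(val))
        elif t == 'uri':
            gnames.append(x509.UniformResourceIdentifier(val))
        elif t == 'ip':
            # ip_address() returns IPv4Address or IPv6Address automatically
            gnames.append(x509.IPAddress(ipaddress.ip_address(val)))
        elif t == 'net':
            gnames.append(x509.IPAddress(ipaddress.ip_network(val)))
        else:
            raise ValueError('unsupported gname type: %s' % (t,))
    return gnames

print(gnames_from_strings(['dns:example.com', 'ip:10.0.0.1']))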
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list_to_names(names):\n names_list = []\n for n in names:\n names_list.append(names[n].details['name'])\n return names_list", "def original_names(names):\n if len(names) > 0 and isinstance(names[0], (list, tuple)):\n names = [x[0] for x in names]\n\n return [re.sub('_+$', '', x) for x in names]", "def standardize_many(self, names: list[str]) -> list[str]:\n assert type(names) == list\n return [self.standardize(name) for name in names]", "def nameList(self):\r\n return [self.name.lower(), self.code] + self._otherNames", "def rem_str(prelist,names):\n \n for prefix in prelist:\n names=[name.replace(prefix,'') for name in names]\n \n return names", "def rem_str(prelist,names):\n\n for prefix in prelist:\n names=[name.replace(prefix,'') for name in names]\n\n return names", "def TransformNames(self) -> _n_2_t_0[str]:", "def _repair_names_universal(\n names: Iterable[str], quiet: bool = False, base0_: bool = None\n) -> List[str]:\n min_names = _repair_names_minimal(names)\n neat_names = [re.sub(r\"[^\\w]\", \"_\", name) for name in min_names]\n new_names = _repair_names_unique(\n neat_names,\n quiet=True,\n sanitizer=lambda name: (\n f\"_{name}\"\n if keyword.iskeyword(name) or (name and name[0].isdigit())\n else name\n ),\n base0_=base0_,\n )\n if not quiet:\n changed_names = [\n (orig_name, new_name)\n for orig_name, new_name in zip(names, new_names)\n if orig_name != new_name\n ]\n _log_changed_names(changed_names)\n return new_names", "def add_prefix_to_list_items(prefix: str, items: list) -> list:\r\n new_items = []\r\n for item in items:\r\n new_items.append(f\"{prefix}{str(item)}\")\r\n return new_items", "def full_names(self) -> List[str]:\n self.names = [\n \".\".join(prod)\n for prod in product(*self._namespaces, self.terminals)\n ]\n return self.names", "def get_names_short(self):\r\n return [p.get_name() for p in self.people]", "def names(cls) -> List[str]:", "def extract_full_names(people):\n result = []\n \n for lst in names:\n x = ''\n for name in lst.values():\n x += ' ' + name \n x = x[1:] \n result.append(x)\n return result", "def names(self) -> list[str]:", "def transform_prefix(filenames, prefix_old, prefix_new):\n\n new_filenames = set([])\n len_prefix_old = len(prefix_old)\n # loop over the list of files and remove the prefix\n for name in filenames:\n name = name[len_prefix_old:]\n new_filenames.add(prefix_new + name)\n\n\n return new_filenames", "def name_list(string):\n names = []\n for name in string.split('; '):\n if ', ' in name:\n last_comma_first = name.split(', ', 2)\n first = last_comma_first[1].strip()\n last = last_comma_first[0].strip()\n names.append(first + \" \" + last)\n else:\n names.append(name.strip())\n return names", "def clean_names(names):\n\n cleaned_names = []\n for name in names:\n name_parts = [n_part for n_part in name.split() if '.' 
not in n_part]\n cleaned_names.append(\" \".join(name_parts))\n\n return cleaned_names", "def _repair_names_unique(\n names: Sequence[str],\n quiet: bool = False,\n sanitizer: Callable = None,\n base0_: bool = None,\n) -> List[str]:\n base = int(not base0_)\n min_names = _repair_names_minimal(names)\n neat_names = [\n re.sub(r\"(?:(?<!_)_{1,2}\\d+|(?<!_)__)+$\", \"\", name)\n for name in min_names\n ]\n if callable(sanitizer):\n neat_names = [sanitizer(name) for name in neat_names]\n\n new_names = []\n changed_names = []\n for i, name in enumerate(neat_names):\n if neat_names.count(name) > 1 or name == \"\":\n name = f\"{name}__{i + base}\"\n if name != names[i]:\n changed_names.append((names[i], name))\n new_names.append(name)\n if not quiet:\n _log_changed_names(changed_names)\n return new_names", "def personas(self, pretty=True, sort=True):\n names = list(self.name2base)\n if pretty: names = [self.process_name(name, True) for name in names]\n if sort: names = sorted(names)\n return names", "def listit(list, prefix):\n\n l = []\n for x in list:\n l.append(prefix+(x.strip()).rstrip('.fits'))\n return \",\".join(l)", "def get_short_names(self) -> List[str]:\n result = []\n for elements in self._get_results_list():\n result.append(elements[0])\n return result", "def itemnames():\n g = ['KIS_NA_39', 'VII_57', 'MX_48', 'MX_56', 'KIS_NA_42', 'VII_54',\n 'MX_S_48', 'MX_S_52', 'MX_52', 'KIS_NA_45', 'KIS_NA_51', 'MIP_45',\n 'MIP_49', 'MIP_52', 'MIP_plus_48', 'MIP_plus_51', 'MX_42', 'MX_45',\n 'MIP_G_42', 'KIS_42', 'KIS_NA_48']\n return(g)", "def drug_names_on_drug_list(drug_list):\n return [dl[\"Drug (brand name)\"] for dl in drug_list]", "def separate_names (name, alt_name=None):\n\n names = name.split(' ')\n\n # Pop first item in list\n first_name = names.pop(0)\n # middle_name = None\n last_name = None\n\n if len (names):\n # Pop last item of list\n # last_name = names.pop()\n\n # We got rid of middle name so now the rest of the names are last name\n last_name = ' '.join(names)\n\n elif alt_name:\n last_name = alt_name\n\n # if len (names):\n # # Middle name(s) are the rest of the list\n # middle_name = ' '.join(names)\n\n return {\n \"first_name\": first_name,\n # \"middle_name\": middle_name if middle_name else '',\n \"last_name\": last_name if last_name else ''\n }", "def getNames(self) -> List[unicode]:\n ...", "def _prefixed_items_from_list(items: List[namedtuple], item_prefix, prefix, tag_names: Set[str] = set([])):\n result = {}\n for index, nt in enumerate(items):\n result[\"%s%d\" % (item_prefix, index)] = _parse(nt, prefix, tag_names)\n return result", "def formatlist(input_list):\n\n output_list = []\n for item in input_list:\n item = str(item)\n item = item.replace(\" \", \"_\")\n output_list.append(item)\n return output_list", "async def process_prefix_list(\n guild: disnake.Guild,\n ctx: commands.Context = None,\n inter: AppCmdInter = None,\n allowed_mentions=None,\n):\n await create_guild_model(guild)\n guild = await Guild.get(guild.id)\n msg = f\"The following are the custom prefixes for {guild.name}:\\n\" + \", \".join(\n guild.prefixes\n )\n await send_message(msg=msg, ctx=ctx, inter=inter, allowed_mentions=allowed_mentions)", "def name_list(file_name):\n \n li = open(file_name)\n list_of_names = []\n\n for name in li:\n (first,last) = str.split(name,' ')\n list_of_names.append(Name(first,last))\n return list_of_names", "def snake_case_split(string_list):\n result = []\n for string in string_list:\n result.extend([x.lower() for x in string.split('_') if x])\n return result", 
"def extract_dataset_names(list_of_files, prefix = \"\", suffix = \"\"):\r\n dataset_names = []\r\n for filename in list_of_files:\r\n dataname = re.sub(\"(.*?)(\\.)(\"+prefix+\")(.*)(\"+suffix+\")\", \"\\\\4\", filename)\r\n dataset_names.append(dataname)\r\n assert dataset_names != [], \"dataset_names did not populate\"\r\n return dataset_names", "def names(self):\n if isinstance(self.name, string_types):\n return [self.name]\n else:\n return list(self.name)", "def get_data_list_name(name):\n last = name[-1]\n if last in 'y':\n if last in 'a,e,i,o,u,y':\n name = name[0:-1] + 'ies'\n else:\n name += 's'\n elif last in 'ou':\n name += 'es'\n elif last == 'f':\n name = name[0:-1] + 'ves'\n elif name[-2:-1] == 'fe':\n name = name[0:-2] + 'ves'\n elif last in ['s', 'ss', 'x', 'sh', 'ch']:\n name += 'es'\n else:\n name += 's'\n return name", "def _groupNamesToList(settings):\n return [getattr(GroupName, val) for val in settings.dhGroups]", "def disambiguate(names: list[str], mark: str = \"1\") -> list[str]:\n names_seen = set()\n new_names = []\n for name in names:\n new_name = name\n while new_name in names_seen:\n new_name += mark\n new_names.append(new_name)\n names_seen.add(new_name)\n\n return new_names", "def beginsWithSingleUnderscore( inList ):\n ensureInstance( inList, list, level=2 )\n for anElement in inList:\n assert isinstance( anElement, str ), \"inList must be string\"\n \n outList= []\n \n for anElement in inList:\n if len( anElement ) >= 2 and anElement[0] == \"_\" and anElement[1] != \"_\":\n outList.append( anElement )\n \n return outList", "def take_name(List):\n list_of_names = []\n for i in range(len(List)):\n if isinstance(List[i], Attribute):\n x = List[i]\n list_of_names.append(x.Name)\n elif isinstance(List[i], list):\n list_of_names.append(take_name(List[i]))\n else:\n list_of_names.append(List[i])\n return list_of_names", "def preprocess_data(extracted_data: List[Tuple[str, str]]) -> List[str]:\n return [f'__label__{data[0]} {clean_formatting(data[1])}' for data in extracted_data]", "def transform(self, nameList):\n return {name: self.transformSingle(name) for name in nameList}", "def extract_gnames(self, ext):\n res = []\n for gn in ext:\n if isinstance(gn, x509.RFC822Name):\n res.append('email:' + as_unicode(gn.value))\n elif isinstance(gn, x509.DNSName):\n res.append('dns:' + as_unicode(gn.value))\n elif isinstance(gn, x509.UniformResourceIdentifier):\n res.append('uri:' + as_unicode(gn.value))\n elif isinstance(gn, x509.IPAddress):\n res.append('ip:' + str(gn.value))\n elif isinstance(gn, x509.DirectoryName):\n val = self.extract_name(gn.value)\n res.append('dn:' + render_name(val))\n else:\n raise InvalidCertificate(\"Unsupported subjectAltName type: %s\" % (gn,))\n return res", "def _clean_budget_names(recarray, names):\n newnames = []\n mbnames = [\"TOTAL_IN\", \"TOTAL_OUT\", \"IN-OUT\", \"PERCENT_DISCREPANCY\"]\n for name in names:\n if name in mbnames:\n newnames.append(name)\n elif (\n not name.startswith(\"FROM_\")\n and not name.startswith(\"TO_\")\n and not name.endswith(\"_IN\")\n and not name.endswith(\"_OUT\")\n ):\n newname_in = \"FROM_\" + name.upper()\n newname_out = \"TO_\" + name.upper()\n if newname_in in recarray[\"name\"]:\n newnames.append(newname_in)\n if newname_out in recarray[\"name\"]:\n newnames.append(newname_out)\n else:\n if name in recarray[\"name\"]:\n newnames.append(name)\n return newnames", "def getPeopleNames(the_list):\n new_list = []\n if type(the_list) == list:\n for person in the_list:\n if person['@type'] == \"Person\":\n 
new_list.append(person['name'])\n else:\n new_list.append(the_list['name'])\n return new_list", "def _addPrefixes(data):\n prevTags = None\n newData = []\n\n for n, (token, tags) in enumerate(data):\n\n newTags = []\n\n for t in tags:\n p = \"B\" if ((prevTags is None) or (t not in prevTags)) else \"I\"\n newTags.append(\"%s-%s\" % (p, t))\n\n newData.append((token, newTags))\n prevTags = tags\n\n return newData", "def prefixCombiner(prefix, itemlist, glue=''):\n result = []\n for item in itemlist:\n result.append(prefix + glue + item)\n return result", "def add_item_to_list(given_list, prefix):\n new_list = []\n if given_list:\n for item in given_list:\n item.lstrip()\n if item.startswith(\"http://\") or item.startswith(\"https://\") or item.startswith(\"//\"):\n if item.startswith(prefix):\n new_list.append(item)\n else:\n new_list.append(prefix + '/' + item)\n return new_list", "def standard_name_remapper(orig_name):\n # Remove any trailing parentheses.\n # TODO(tjann): to check if this is safe.\n paren_start = orig_name.find(\"(\")\n if paren_start != -1:\n orig_name = orig_name[:paren_start]\n\n # Removes separating words.\n orig_name = orig_name.replace(\",\", \" \")\n orig_name = orig_name.replace(\"-\", \" \")\n orig_name = orig_name.replace(\"and \", \"\")\n return \"\".join([word.capitalize() for word in orig_name.split()])", "def parse_input_topgro_names( name ):\n\n #Check whether we're working with a list or prefix\n if not os.path.isfile(name[0]):\n #If the first entry is not a name, then it is probably a prefix\n names = (name + '.top', name + '.gro')\n for n in names:\n assert os.path.isfile(n), \"No such input file %s...\" % n\n return names \n else:\n names = name\n for n in names:\n assert os.path.isfile(n), \"No such input file %s...\" % n\n\n return names", "def generate_name(path_list):\n name = path_list[0]\n for item in path_list[1:]:\n name += \"[\" + item + \"]\"\n return name", "def make_label_names(name_lsit):\n\n hover_label_names = []\n for x in range(len(name_lsit)):\n temp1 = name_lsit[x]\n hover_label_names.append(temp1)\n\n return hover_label_names", "def _list_of_availability_strings():\n names = [availability.name for availability in Availability]\n return names", "def Student_names(l:list)->list:\n result=[]\n for s in l:\n result.append(s.name)\n return result", "def _set_base_namelists(self):\n\n # Create namelists\n hydro_namelist = self.model.hydro_namelists\n hrldas_namelist = self.model.hrldas_namelists\n\n self.base_hydro_namelist = hydro_namelist.patch(self.domain.hydro_namelist_patches)\n self.base_hrldas_namelist = hrldas_namelist.patch(self.domain.hrldas_namelist_patches)", "def fuzzy_name(str_in):\n if isinstance(str_in, str):\n return str_in.lower().replace('_', '').replace('-', '')\n else:\n return [_s.lower().replace('_', '').replace('-', '')\n if isinstance(_s, str) else _s for _s in str_in]", "def keep_lowercase(str_list):", "def get_img_name(dict_needed):\r\n \r\n new_list = []\r\n for i in dict_needed:\r\n new_name = i.split('_')[0]\r\n new_list.append(new_name)\r\n \r\n return new_list", "def split_name(fullname):", "def convertListToString(list):\n return re.sub(r'[^\\w ]', '', str(list))", "def get_uniprot_names(uniprot_result):\n name_lines = [l for l in uniprot_result.split('\\n') if l.startswith('DE')]\n\n names = []\n\n for nm_line in name_lines:\n if 'Full=' in nm_line:\n names.append(nm_line.split('Full=')[-1][:-1])\n elif 'Short=' in nm_line:\n names.append(nm_line.split('Short=')[-1][:-1])\n\n return names", "def 
get_alternate_names(self, alt_list):\n self.alternates = [a.name for a in alt_list if a.raga == self.name]", "def namelist(self):\n return []", "def format_troops(troops_raw: list):\n troops = set()\n for troop in troops_raw:\n troop = troop.strip()\n troop = troop.replace(\" \", \"_\")\n troop += \".lua\"\n\n if troop != \".lua\":\n troops.add(troop)\n\n return troops", "def names_for(self, name):\n names = [\"%s.%s\" % (self.package, name)]\n if self.prefix:\n names.append(\"%s.%s\" % (self.prefix, names[0]))\n return names", "def prefix_all(value, LL):\n return [[value] + L for L in LL]", "def names(self) -> List:\n ...", "def generate_list_of_names(self):\n names = [donor._full_name for donor in self.donor_list]\n name_selection = \"\\n\".join(\n [\"{}\"] * len(self.donor_list)).format(*names)\n return name_selection", "def getNames():\r\n return [\"Server1\", \"Server2\", \"Client1\", \"Client2\"]", "def format_labels(_labels):\n _ret = []\n if isinstance(_labels, str):\n # put in a list if the label is a string.\n _ret = [_labels]\n elif isinstance(_labels, dict):\n for _key, _item in _labels.items():\n _ret.append(_key.strip().replace(\" \", \"-\").replace(\"_\", \"-\"))\n elif isinstance(_labels, list) or isinstance(_labels, tuple):\n for _item in _labels:\n _ret.append(_item.strip().replace(\" \", \"-\").replace(\"_\", \"-\"))\n return _ret", "def clean_names_list(names):\n pure_names = []\n nan = re.compile('nan', re.IGNORECASE)\n title = re.compile('surname', re.IGNORECASE)\n for name in names:\n if nan.search(name):\n continue\n elif title.search(name):\n continue\n else:\n pure_names.append(name)\n return pure_names", "def compound_names(self) -> List[str]:\n return None", "def extract_names(collection):\n return map(repr, collection)", "def nsrGenera(taxonList, synonymList):\r\n species = list(filter(None, sorted(taxonList + synonymList)))\r\n generaList = [i.split()[0] for i in species]\r\n generaList = list(dict.fromkeys(generaList))\r\n return generaList", "def canonicalize(name):\n prefixes, first_part, last_part, suffixes = split(name)\n canonical = \"\"\n if prefixes:\n canonical = namecase(prefixes)\n if first_part:\n canonical += \" \" + namecase(first_part)\n if last_part:\n canonical += \" \" + namecase(last_part)\n if suffixes:\n canonical += \", \" + namecase(suffixes)\n return canonical.strip()", "def replace_special_characters_in_list(self, full_list):\n return [n.replace(':','%3A') for n in full_list]", "def get_nice_names(self) -> List[str]:\n result = []\n for elements in self._get_results_list():\n result.append(elements[1])\n return result", "def add_prefix(prefix = \"Peptides\"):\n var_list = gen_cell_lines_states_replicates()\n prefix = prefix\n res_list = []\n for i in var_list:\n unit_str = prefix + \" \"\n unit_str += i\n res_list.append(unit_str)\n return res_list", "def genSufList():\n global gConst\n\n sufChrList = []\n for suffix in gConst['picSufList']:\n for c in suffix:\n sufChrList.append(c)\n\n sufChrList = crifanLib.crifanList.uniqueList(sufChrList)\n # sufChrList = uniqueList(sufChrList)\n sufChrList.sort()\n joinedSuf = ''.join(sufChrList)\n swappedSuf = joinedSuf.swapcase()\n wholeSuf = joinedSuf + swappedSuf\n\n return wholeSuf", "def normalize_name(cls, name):\n\t\treturn ' '.join(name.lower().strip().split())", "def normalize_name(cls, name):\n\t\treturn ' '.join(name.lower().strip().split())", "def normalize_name(cls, name):\n\t\treturn ' '.join(name.lower().strip().split())", "def extract_names(collection):\n return (\n 
'[{}]'.format(', '.join(map(repr, groups[n].entity_names)))\n if n in groups else repr(n) for n in collection\n )", "def __get_names(record: TNSRecord) -> Dict[str, str]:\n aliases = {'iau': record.name}\n internal_names = record.internal_names.split(',')\n for provider, pattern in Object.name_patterns.items():\n for name in internal_names:\n if pattern.match(name):\n aliases[provider] = name\n return aliases", "def get_name_list(msh, varname):\n return [str(chartostring(v)) for v in msh.variables[varname]]", "def make_name(self):\n first, last = \"\", \"\"\n\n def get_first(self):\n \"\"\" Generate a first name \"\"\"\n return \"%s%s\" % (\n weighted_choice([(\"\", 39), (\"We put our faith in \", 1)]),\n choice(self.first_chunks).title()\n )\n\n def get_last(self):\n \"\"\" Generate a last name \"\"\"\n return \"%s%s%s\" % (\n # As per the original list there's a 1/39 (not conting Bob)\n # chance for a 'Mc' prefix to the lastname\n #\n # Can also, with low propability be \"von <lastname>\"\n weighted_choice([(\"\", 35), (\"Mc\", 3), (\"von \", 1)]),\n choice(self.second_chunks).title(),\n choice(self.third_chunks))\n\n # Avoid the first name reappearing in the last name...\n while first.lower() in last.lower():\n first = get_first(self)\n last = get_last(self)\n\n # Always exclaimatory\n return \"%s %s!\" % (first, last)", "def _make_name(words):\n return \" \".join(words)", "def names(self):\n if type(self.name) is types.StringType:\n return [self.name]\n else:\n return list(self.name)", "def extract_names(register):\n names = []\n for i in range(len(register) - 1): # len() -> no of columns\n first_name = str(register.iloc[i][2]).capitalize()\n last_name = str(register.iloc[i][1]).upper()\n name = last_name + ' ' + first_name\n names.append(name)\n names = list(set(names))\n return names", "def variable_parser(var_list, prefix):\r\n ret_list = []\r\n for var in var_list:\r\n varname = var.name\r\n varprefix = varname.split('/')[0]\r\n if varprefix == prefix:\r\n ret_list.append(var)\r\n elif prefix in varname:\r\n ret_list.append(var)\r\n return ret_list", "def process_names( names ):\n\tp_list = []\n\tfor i in xrange( len( names ) ):\n\t\t#print i\n\t\tp_list.append( str(i) + \"__\" + names[i] )\n\n\tRV = \";\".join(p_list)\n\treturn( RV )", "def create_short_database_names(path_list):\n no_suffixes = [Path(p).resolve().with_suffix(\"\") for p in path_list]\n # The assert statement makes sure that the while loop terminates\n assert len(set(no_suffixes)) == len(\n no_suffixes\n ), \"path_list must not contain duplicates.\"\n short_name_to_path = {}\n for path, path_with_suffix in zip(no_suffixes, path_list):\n parts = tuple(reversed(path.parts))\n needed_parts = 1\n candidate = parts[:needed_parts]\n while _causes_name_clash(candidate, no_suffixes):\n needed_parts += 1\n candidate = parts[:needed_parts]\n\n short_name = \"/\".join(reversed(candidate))\n short_name_to_path[short_name] = path_with_suffix\n return short_name_to_path", "def get_filtered_file_names_list(_file_names_list, _gender=None, _noise=None, _scale=None):\n _file_names_split_list = [re.split('[/_]+', fname) for fname in _file_names_list]\n\n if _gender:\n if type(_gender) == str:\n _gender = [_gender]\n _file_names_split_list = [f_name for f_name in _file_names_split_list if f_name[-3] in _gender]\n\n if _noise:\n if type(_noise) == str:\n _noise = [_noise]\n _file_names_split_list = [f_name for f_name in _file_names_split_list if f_name[-2] in _noise]\n\n if _scale:\n if type(_scale) == str:\n _scale = [_scale]\n 
_file_names_split_list = [f_name for f_name in _file_names_split_list if f_name[-1] in _scale]\n\n _file_names_list = ['_'.join(['/'.join(fname_split[:3]), fname_split[-2], fname_split[-1]])\n for fname_split in _file_names_split_list]\n\n return _file_names_list", "def origin_renames(self):\n try:\n return [tup[0] for tup in sorted(self.destination.renames)]\n except AttributeError:\n return []", "def namelist(self):\n return self._handle.namelist()", "def namelist(self):\n return self._handle.namelist()", "def namelist(self):\n\n # try to create a name from the archive name\n # because a gzipped file doesn't have information about the\n # original filename\n # gzipping a file creates the archive name by appending \".gz\"\n genericfilename = self._archivename\n\n if not genericfilename:\n genericfilename = \"generic.unknown.gz\"\n\n try:\n # get list of file extensions\n fileendinglist = Archivehandle.avail_archive_extensionlist4type['gz']\n replacedict = {\"wmz\": \"wmf\",\n \"emz\": \"emf\"}\n for ending in fileendinglist:\n endingwithdot = \".\"+ending\n if genericfilename.endswith(endingwithdot):\n if ending in replacedict:\n genericfilename = genericfilename[:-len(ending)]+replacedict[ending]\n else:\n genericfilename = genericfilename[:-len(endingwithdot)]\n break\n\n except Exception as e:\n print(e)\n pass\n return [genericfilename]", "def name_components(self) -> typing.List[str]:\n return self._name.split(CompositeType.NAME_COMPONENT_SEPARATOR)", "def glyphSet2NameSet(f, glyphSet, skipExtensions=None):\n if skipExtensions is None:\n skipExtensions = ('sc',)\n nameSet = set()\n for g in f:\n gName = g.name\n nameParts = gName.split('.')\n baseName = nameParts[0]\n if len(nameParts) > 1:\n extension = '.'.join(nameParts[1:])\n else:\n extension = None\n if extension and extension in skipExtensions:\n continue\n if baseName in f:\n baseGlyph = f[baseName]\n if (baseGlyph.unicode and chr(baseGlyph.unicode) in glyphSet) or baseName in glyphSet:\n nameSet.add(g.name)\n nameSet.add(baseName)\n elif (g.unicode and chr(g.unicode) in glyphSet) or gName in glyphSet:\n nameSet.add(g.name)\n return nameSet", "def standardize_name_for_look_up(name: Any) -> str:\n if not isinstance(name, str):\n return name\n\n name = name.lower().strip()\n name = \" \".join(name.split(\"_\"))\n name = name.translate(\n str.maketrans(\"\", \"\", string.punctuation)\n ) # remove punctuation\n name = \" \".join(\n [part for part in name.split(\" \") if part]\n ) # ensure there is only a single space between words\n return name", "def named_entities(self) -> List[str]:", "def _rectify_names(infr, old_names, new_labels):\n infr.print('rectifying name lists', 3)\n from wbia.scripts import name_recitifer\n\n newlabel_to_oldnames = ut.group_items(old_names, new_labels)\n unique_newlabels = list(newlabel_to_oldnames.keys())\n grouped_oldnames_ = ut.take(newlabel_to_oldnames, unique_newlabels)\n # Mark annots that are unknown and still grouped by themselves\n still_unknown = [len(g) == 1 and g[0] is None for g in grouped_oldnames_]\n # Remove nones for name rectifier\n grouped_oldnames = [\n [n for n in oldgroup if n is not None] for oldgroup in grouped_oldnames_\n ]\n new_names = name_recitifer.find_consistent_labeling(\n grouped_oldnames, verbose=infr.verbose >= 3, extra_prefix=None\n )\n\n unknown_labels = ut.compress(unique_newlabels, still_unknown)\n\n new_flags = [n is None for n in new_names]\n # isinstance(n, str) and n.startswith('_extra_name')\n # for n in new_names\n # ]\n label_to_name = 
ut.dzip(unique_newlabels, new_names)\n needs_assign = ut.compress(unique_newlabels, new_flags)\n return label_to_name, needs_assign, unknown_labels", "def Names():\n for i in range(ida_name.get_nlist_size()):\n ea = ida_name.get_nlist_ea(i)\n name = ida_name.get_nlist_name(i)\n yield (ea, name)" ]
[ "0.65021443", "0.6432721", "0.61846787", "0.6088756", "0.6081885", "0.6031575", "0.6005844", "0.59734565", "0.5912184", "0.58610666", "0.5832652", "0.5829637", "0.58152074", "0.574755", "0.5740378", "0.5698568", "0.5694689", "0.56923187", "0.5686958", "0.56855613", "0.56847", "0.56621784", "0.56090665", "0.5603594", "0.5585332", "0.5570726", "0.5551596", "0.55451006", "0.5542647", "0.5530685", "0.5520771", "0.5492281", "0.5479832", "0.54589856", "0.5454043", "0.5443165", "0.5438131", "0.541579", "0.5415281", "0.5390061", "0.5377768", "0.5376145", "0.5361926", "0.53556174", "0.5351929", "0.5338719", "0.5329107", "0.5328744", "0.53286237", "0.5326453", "0.5309702", "0.5307506", "0.5307091", "0.5293116", "0.528723", "0.5282175", "0.5279062", "0.5270541", "0.525679", "0.52462244", "0.52410173", "0.5240426", "0.5234585", "0.523427", "0.52330077", "0.5225351", "0.5221343", "0.5218162", "0.5212371", "0.5208992", "0.5208145", "0.52030927", "0.51983154", "0.5184106", "0.5179823", "0.5178791", "0.5165179", "0.5165179", "0.5165179", "0.51633286", "0.5145582", "0.51448464", "0.5144637", "0.5133344", "0.51315105", "0.51264226", "0.5116079", "0.5114202", "0.51132905", "0.51005083", "0.509564", "0.50911963", "0.50911963", "0.509086", "0.50904024", "0.5090147", "0.50886714", "0.50794274", "0.5077923", "0.5063778" ]
0.6499037
1
Return SubjectAltNames as GeneralNames
def get_san_gnames(self):
    return self.load_gnames(self.san)
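A brief usage sketch, reusing the hypothetical gnames_from_strings helper from the earlier sketch: GeneralName objects built this way can be passed directly to cryptography's SubjectAlternativeName extension, which is how a SAN list like self.san would end up in a certificate.

from cryptography import x509

san_strings = ['dns:example.com', 'dns:www.example.com', 'email:admin@example.com']
san_ext = x509.SubjectAlternativeName(gnames_from_strings(san_strings))
for gn in san_ext:
    # Each entry is a GeneralName subclass; .value holds the raw string
    print(type(gn).__name__, gn.value)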
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_subject_alt(self, name):\n\n if self._subject_alt_name is None:\n return []\n\n output = []\n for general_name in self._subject_alt_name:\n if general_name.name == name:\n output.append(general_name.native)\n return output", "def subject_alt_emails(self):\n\n return self._get_subject_alt('rfc822_name')", "def subject_alt_domains(self):\n\n return self._get_subject_alt('dns_name')", "def extract_gnames(self, ext):\n res = []\n for gn in ext:\n if isinstance(gn, x509.RFC822Name):\n res.append('email:' + as_unicode(gn.value))\n elif isinstance(gn, x509.DNSName):\n res.append('dns:' + as_unicode(gn.value))\n elif isinstance(gn, x509.UniformResourceIdentifier):\n res.append('uri:' + as_unicode(gn.value))\n elif isinstance(gn, x509.IPAddress):\n res.append('ip:' + str(gn.value))\n elif isinstance(gn, x509.DirectoryName):\n val = self.extract_name(gn.value)\n res.append('dn:' + render_name(val))\n else:\n raise InvalidCertificate(\"Unsupported subjectAltName type: %s\" % (gn,))\n return res", "def _set_subject_alt(self, name, values):\n\n if self._subject_alt_name is not None:\n filtered_general_names = []\n for general_name in self._subject_alt_name:\n if general_name.name != name:\n filtered_general_names.append(general_name)\n self._subject_alt_name = x509.GeneralNames(filtered_general_names)\n\n else:\n self._subject_alt_name = x509.GeneralNames()\n\n if values is not None:\n for value in values:\n new_general_name = x509.GeneralName(name=name, value=value)\n self._subject_alt_name.append(new_general_name)\n\n if len(self._subject_alt_name) == 0:\n self._subject_alt_name = None", "def AlternativeNames(self, default=[None]):\n return self.data.get('alternative_names', default)", "def subject_alt_uris(self):\n\n return self._get_subject_alt('uniform_resource_identifier')", "def xff_alternative_names(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"xff_alternative_names\")", "def subject_alt_ips(self):\n\n return self._get_subject_alt('ip_address')", "def get_cert_DNSNames(cert):\n try:\n ext = cert.extensions.get_extension_for_oid(x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME)\n dns_names = ext.value.get_values_for_type(x509.DNSName)\n except Exception:\n raise exception.SysinvException(_(\n \"Failed to get certificate SAN's DNSNames.\"))\n return dns_names", "def get_names_short(self):\r\n return [p.get_name() for p in self.people]", "def get_uniprot_names(uniprot_result):\n name_lines = [l for l in uniprot_result.split('\\n') if l.startswith('DE')]\n\n names = []\n\n for nm_line in name_lines:\n if 'Full=' in nm_line:\n names.append(nm_line.split('Full=')[-1][:-1])\n elif 'Short=' in nm_line:\n names.append(nm_line.split('Short=')[-1][:-1])\n\n return names", "def typedAntennaNames() :\n a=s.getAntennaAssignments()\n namelist = []\n for i in a:\n namelist.append( i.typedAntennaName )\n return namelist", "def get_name(self):\n return m2.x509_extension_get_name(self.x509_ext)", "def make_subject(res):\n subj_dict = {\n \"Agricultural and Biological Sciences (miscellaneous)\": \"Agricultural and Biological Sciences\",\n \"Unsorted\": \"Unsorted\"}\n keys = subj_dict.keys()\n s = res['subject'] if \"subject\" in res.keys() else [\"Unsorted\"]\n \n return [subj_dict[x] if x in keys else x for x in s]", "def load_gnames(self, gname_list):\n gnames = []\n for alt in gname_list:\n if ':' not in alt:\n raise InvalidCertificate(\"Invalid gname: %s\" % (alt,))\n t, val = alt.split(':', 1)\n t = t.lower().strip()\n val = val.strip()\n if t == 'dn':\n gn = 
x509.DirectoryName(self.load_name(parse_dn(val)))\n elif t == 'dns':\n gn = x509.DNSName(val)\n elif t == 'email':\n gn = x509.RFC822Name(val)\n elif t == 'uri':\n gn = x509.UniformResourceIdentifier(val)\n elif t == 'ip':\n if val.find(':') >= 0:\n gn = x509.IPAddress(ipaddress.IPv6Address(val))\n else:\n gn = x509.IPAddress(ipaddress.IPv4Address(val))\n elif t == 'dn':\n gn = x509.DirectoryName(self.load_name(parse_dn(val)))\n elif t == 'net':\n if val.find(':') >= 0:\n gn = x509.IPAddress(ipaddress.IPv6Network(val))\n else:\n gn = x509.IPAddress(ipaddress.IPv4Network(val))\n else:\n raise Exception('Invalid GeneralName: ' + alt)\n gnames.append(gn)\n return gnames", "def get_issuer_urls_gnames(self):\n urls = ['uri:' + u for u in self.issuer_urls]\n return self.load_gnames(urls)", "def xff_alternative_names(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"xff_alternative_names\")", "def xff_alternative_names(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"xff_alternative_names\")", "def get_subject_cn(self):\n subject = self.get_subject()\n cns = subject.get_entries_by_oid(name.OID_commonName)\n return [cn.get_value() for cn in cns]", "def __get_names(record: TNSRecord) -> Dict[str, str]:\n aliases = {'iau': record.name}\n internal_names = record.internal_names.split(',')\n for provider, pattern in Object.name_patterns.items():\n for name in internal_names:\n if pattern.match(name):\n aliases[provider] = name\n return aliases", "def get_subject(self):\n ri = self.get_request_info()\n if ri['subject'] is None:\n ri['subject'] = None\n # setup first RDN sequence\n ri['subject'][0] = None\n\n subject = ri['subject'][0]\n return name.X509Name(subject)", "def test_100(self):\n primary_str, equivalent_set = gmn.app.middleware.session_cert.get_authenticated_subjects(\n self.cert_simple_subject_info_pem\n )\n self.assertEqual(\n primary_str,\n 'CN=Roger Dahl A1779,O=Google,C=US,DC=cilogon,DC=org',\n )\n self.assertListEqual(\n sorted(equivalent_set),\n [\n 'CN=Roger Dahl A1779,O=Google,C=US,DC=cilogon,DC=org',\n 'authenticatedUser',\n 'public',\n 'verifiedUser',\n ],\n )", "def get_pretty_subject(cert):\n subject = 'subject=' + _get_pretty_name(cert.get_subject())\n issuer = 'issuer=' + _get_pretty_name(cert.get_issuer())\n return subject + '\\n' + issuer + '\\n'", "def extract_subject_names(file_names):\n return file_names.apply(lambda name: name.split('_')[1])", "def show_all_subjects(self):\n self.load_subjects_in_twSubjects([self.pj[SUBJECTS][x][\"name\"] for x in self.pj[SUBJECTS]])", "def get_name(self):\n return self.load_name(self.subject)", "def extract_subjects(subject_info_xml, primary_str):\n subject_info_pyxb = deserialize_subject_info(subject_info_xml)\n subject_info_tree = d1_common.cert.subject_info.gen_subject_info_tree(\n subject_info_pyxb, primary_str\n )\n return subject_info_tree.get_subject_set()", "def issuer_alt_name_value(self):\n\n if self._processed_extensions is False:\n self._set_extensions()\n return self._issuer_alt_name_value", "def extract_names(collection):\n return (\n '[{}]'.format(', '.join(map(repr, groups[n].entity_names)))\n if n in groups else repr(n) for n in collection\n )", "def getSubject(self):\n\n return X501DN.from_POW(self.get_POW().getSubject())", "def getSubject(self):\n\n return X501DN.from_POW(self.get_POW().getSubject())", "def get_uniprot_gene_info(uniprot_result):\n gene_lines = [l for l in uniprot_result.split('\\n') if l.startswith('GN')]\n\n gene_names = 
[]\n\n for gn_line in gene_lines:\n parts = gn_line[2:].split(';')\n for p in parts:\n p = p.strip()\n if p.startswith('Name='):\n gene_names.append(p[5:])\n elif p.startswith('Synonyms='):\n gene_names += [s.strip() for s in p[9:].split(',')]\n\n return gene_names", "def subject(self) -> \"str\":\n return self._attrs.get(\"subject\")", "def subject(self) -> \"str\":\n return self._attrs.get(\"subject\")", "def get_csr_san(self, csr):\n dns_names = []\n try:\n san = csr.extensions.get_extension_for_class(\n x509.SubjectAlternativeName\n )\n except ExtensionNotFound:\n san = None\n if san:\n for dns_name in san.value:\n dns_names.append(dns_name.value)\n return dns_names", "def subject_list():\n items = []\n\n soup = abcradionational.get_soup(URL + \"/podcasts/subjects\")\n \n subject_heading = abcradionational.get_podcast_heading(soup)\n \n for subject in subject_heading:\n items.append({\n 'label': subject['title'],\n 'path': plugin.url_for('subject_item', url=subject['url']),\n })\n\n return items", "def test_cn_ids_are_used_as_fallback(self):\n with pytest.warns(SubjectAltNameWarning):\n rv = extract_ids(X509_CN_ONLY)\n assert [\n DNSPattern(b\"www.microsoft.com\")\n ] == rv", "def encode_san_dns_names(self, san):\n dns_names = []\n for dns_name in san:\n dns_names.append(x509.DNSName(dns_name))\n return dns_names", "def _decode_multiple_subject(self, decoded: str) -> Set[str]:\n\n result = set()\n\n rematch = self._regex_helper.set_regex(r\"((?:[^~\\*,]+))\").match(\n decoded, rematch=True, return_match=True\n )\n\n if rematch:\n result.update({self.extract_base(x) for x in rematch})\n\n return result", "def recode_subject_names(subject_names, masks):\n subject_names = recode_dashed_alphas(subject_names, masks)\n subject_names = recode_dashed_dots(subject_names, masks)\n subject_names = recode_fam_letters(subject_names, masks)\n\n return subject_names", "def target_lang_titles(self):\n return self.target_lang_topics.keys()", "def getSubject(self):\r\n return self.msg[\"Subject\"]", "def TransformNames(self) -> _n_2_t_0[str]:", "def get_certificate_name(cert_data) -> str:\r\n if cert_data is None:\r\n return None\r\n\r\n cert = x509.load_pem_x509_certificate(cert_data, default_backend())\r\n for fields in cert.subject:\r\n current = str(fields.oid)\r\n if \"commonName\" in current:\r\n return fields.value", "def askExtraInfo(self, subject, possibleTopics):\n result = \"Mulle tundub, et sa tahtsid küsida infot \" + subject + \" kohta. 
\" + subject.capitalize() + \" kohta \" \\\n \"saad \" \\\n \"küsida \"\n if len(possibleTopics) == 1:\n result += possibleTopics[0] + \".\"\n else:\n for i in possibleTopics[:-2]:\n result += i + \", \"\n result += possibleTopics[-2] + \" ja \"\n result += possibleTopics[-1] + \".\"\n result += \"\\nPalun täpsusta!\"\n return result", "def convert_x509_name(name):\n types = {\n 'country_name': 'C',\n 'state_or_province_name': 'ST',\n 'locality_name': 'L',\n 'organization_name': 'O',\n 'organizational_unit_name': 'OU',\n 'common_name': 'CN',\n 'email_address': 'emailAddress'\n }\n\n return '/'.join(['{}={}'.format(types[attr], name.native[attr]) for attr in name.native])", "def subject(self):\n return self.get(\"subject\")", "def subject(self):\n return self.properties.get(\"subject\", None)", "def get_labels(subject, filters=['*wang2015atlas*', '*JWDG.lr*'],\n annotations=['HCPMMP1'], sdir=None):\n global subjects_dir\n import glob\n if sdir is not None:\n subject_dir = sdir\n else:\n subject_dir = subjects_dir\n\n labels = []\n for filter in filters:\n labels += glob.glob(join(subject_dir, subject, 'label', filter))\n labels = [mne.read_label(label, subject) for label in labels]\n for annotation in annotations:\n annot = mne.read_labels_from_annot(\n subject, parc=annotation, subjects_dir=subject_dir)\n annot = [a for a in annot if not '???' in a.name]\n labels.extend(annot)\n return labels", "def nametitles(cls) -> t.List[NameTitle]:\n return [label for label in cls.values() if isinstance(label, tuple)]", "def get_alternate_names(self, alt_list):\n self.alternates = [a.name for a in alt_list if a.raga == self.name]", "def tags(self):\n tagexp = re.compile(r\"\\[([^\\]]*)\\]\")\n subject = self['Subject']\n return tagexp.findall(subject)", "def get_patient_name(patient_bundle):\n names = patient_bundle['name']\n for name in names:\n if (name['use'] == 'official'):\n str = ''\n for g in name['given']:\n str += g + ' '\n str += name['family']\n # 'Rita460 Schowalter414'??\n return str", "def extract_name(self, name):\n name_oid2code_map = {v: k for k, v in DN_CODE_TO_OID.items()}\n res = []\n for att in name:\n if att.oid not in name_oid2code_map:\n raise InvalidCertificate(\"Unsupported RDN: %s\" % (att,))\n desc = name_oid2code_map[att.oid]\n val = as_unicode(att.value)\n res.append((desc, val))\n return res", "def getNames(self) -> List[unicode]:\n ...", "def subject(self):\n subject = re.sub(RE_PATTERNS, '', self.header('Subject', ''))\n subject = re.sub(FW_PATTERNS, '', subject)\n return subject.strip()", "def subject_property_name(self):\n subject_property_name = 'subject'\n if 'participant' in self.schemas.keys():\n subject_property_name = 'participant'\n return subject_property_name", "def subject(self) -> str:\n return self[\"Sns\"][\"Subject\"]", "def subject(self):\n subject = loader.render_to_string(self.subject_template_name,\n self.get_context())\n return ''.join(subject.splitlines())", "def currentAntennaNames(carmaOnly=False) :\n a=s.getAntennaAssignments()\n namelist = []\n for i in a:\n cname = i.carmaAntennaName\n tname = i.typedAntennaName\n if (carmaOnly) :\n names = i.carmaAntennaName\n else :\n names = \"%s(%s)\" %(cname,tname)\n namelist.append(names)\n return namelist", "def GetAltForEncoding(*args, **kwargs):\n return _gdi_.FontMapper_GetAltForEncoding(*args, **kwargs)", "def xforwardedforclientcertsubjectdnalias(self) -> str:\n return pulumi.get(self, \"xforwardedforclientcertsubjectdnalias\")", "def get_alternative_name(self, file_root, file_ext):\n return 
\"%s_%s%s\" % (file_root, get_random_string(7), file_ext)", "def get_user_provided_subject_identifier_attrname(self):\n return None", "def natural_key(self):\n return (self.email_subscription_name)", "def subjects(self):\n return self.cache.subjects()", "def alternate_name(self) -> str:\n return self._alternate_name", "def test_alt_name_request(self):\n oim = OIM()\n hostname = 'test.' + DOMAIN\n san = 'test-san.' + DOMAIN\n san2 = 'test-san2.' + DOMAIN\n rc, _, _, msg = oim.request('--hostname', hostname,\n '--altname', san,\n '--altname', san2)\n self.assertEqual(rc, 0, \"Failed to request certificate\\n%s\" % msg)\n self.assert_(oim.reqid != '', msg)", "def itemnames():\n g = ['KIS_NA_39', 'VII_57', 'MX_48', 'MX_56', 'KIS_NA_42', 'VII_54',\n 'MX_S_48', 'MX_S_52', 'MX_52', 'KIS_NA_45', 'KIS_NA_51', 'MIP_45',\n 'MIP_49', 'MIP_52', 'MIP_plus_48', 'MIP_plus_51', 'MX_42', 'MX_45',\n 'MIP_G_42', 'KIS_42', 'KIS_NA_48']\n return(g)", "def subject(self):\n return self.mail.get('Subject')", "def get_subject_set(self):\n return self.get_label_set(SUBJECT_NODE_TAG)", "def get_tagged_titles(ttls_lnks):\n\ttagged_titles = []\n\tfor title, link in ttls_lnks:\n\t\t# get the html tree for the paper's page\n\t\tpaper_tree = get_tree(link)\n\t\tpath = '//table/tr/th[text() = \"Subjects:\"]'\n\t\t# Check if html contains the table header \"Subjects:\"\n\t\tsubject_th = paper_tree.xpath(path)\n\t\t# If it does, this means paper is tagged so add to the list to be returned\n\t\tif subject_th:\n\t\t\ttagged_titles.append(title)\n\n\treturn tagged_titles", "def set_subject(self):\n\t\tfrom email.errors import HeaderParseError\n\t\ttry:\n\t\t\t_subject = decode_header(self.mail.get(\"Subject\", \"No Subject\"))\n\t\t\tself.subject = _subject[0][0] or \"\"\n\t\t\n\t\t\tif _subject[0][1]:\n\t\t\t\tself.subject = self.subject.decode(_subject[0][1])\n\t\t\telse:\n\t\t\t\t# assume that the encoding is utf-8\n\t\t\t\tself.subject = self.subject.decode(\"utf-8\")[:140]\n\t\texcept (UnicodeDecodeError, HeaderParseError):\n\t\t\t#try:\n\t\t\t#\tself.subject = self.subject.decode(\"gb18030\")\n\t\t\t#except UnicodeDecodeError:\n\t\t\tself.subject = u'Error Decoding Subject'\n\t\t#if self.subject and len(self.subject)>140:\n\t\t#\tself.subject = self.subject[:135]\n\t\timport re\n\n\t\temoji_pattern = re.compile(\"[\"\n u\"\\U0001F600-\\U0001F64F\" # emoticons\n u\"\\U0001F300-\\U0001F5FF\" # symbols & pictographs\n u\"\\U0001F680-\\U0001F6FF\" # transport & map symbols\n u\"\\U0001F1E0-\\U0001F1FF\" # flags (iOS)\n \"]+\", flags=re.UNICODE)\n\t\tself.subject = emoji_pattern.sub(r'', self.subject)\n\n\t\tif not self.subject:\n\t\t\tself.subject = \"No Subject\"", "def _merge_ensembl_aliases_with_ucsc():\n result = {}\n for ucsc_name, ensembl_name in ucsc_to_ensembl_reference_names.items():\n result[ensembl_name] = [ucsc_name] + \\\n ensembl_reference_aliases.get(ensembl_name, [])\n return result", "def get_all_names(self):\r\n return [person.name for person in self.__person_repository.elements]", "def ldap_get_intro_members():\n return _ldap_get_group_members('intromembers')", "def names(self) -> list[str]:", "def recode_fam_letters(subject_names, masks):\n subject_names = subject_names.copy()\n subject_names[masks.last_alpha] = subject_names[masks.last_alpha].apply(translate_fam_letter)\n\n return subject_names", "def _extract_subjects(self, subjects):\n self._logger.debug(\"Started extracting subjects metadata\")\n\n subject_metadata_list = []\n\n for subject in subjects:\n self._logger.debug(\n \"Started extracting 
subject metadata from {0}\".format(encode(subject))\n )\n\n scheme = subject.scheme\n\n subject_type = Subject.by_uri.get(scheme)\n if not subject_type:\n # We can't represent this subject because we don't\n # know its scheme. Just treat it as a tag.\n subject_type = Subject.TAG\n\n subject_metadata = SubjectData(\n type=subject_type, identifier=subject.code, name=subject.name, weight=1\n )\n\n subject_metadata_list.append(subject_metadata)\n\n self._logger.debug(\n \"Finished extracting subject metadata from {0}: {1}\".format(\n encode(subject), encode(subject_metadata)\n )\n )\n\n self._logger.debug(\n \"Finished extracting subjects metadata: {0}\".format(\n encode(subject_metadata_list)\n )\n )\n\n return subject_metadata_list", "def get_descriptive_name(self):\r\n long_name = str(self.year) + ' ' + self.make + ' ' + self.model\r\n #Mostrar_Grande = long_name.upper()\r\n #return long_name.upper()\r\n #return Mostrar_Grande #Funciona Com Return TAMBÉM, mas olhe na linha 39 como seria necessário usar.\r\n print(long_name.upper())", "def subject(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"subject\")", "def get_crl_gnames(self):\n urls = ['uri:' + u for u in self.crl_urls]\n return self.load_gnames(urls)", "def alt_authors(self, key, value):\n _authors = self.get(\"authors\", [])\n if _authors:\n for i, v in enumerate(force_list(value)):\n _authors[i].update({\"alternative_names\": clean_val(\"a\", v, str)})\n return _authors", "def subjects(self):\n if not self._subjects:\n self._subjects = [subject_factory(s, workspace=self, samples=self.samples) for s in self._get_entities(self.subject_property_name)]\n return self._subjects", "def get_cryptomatte_names(self):\n return [self.cryptomattes[x][\"name\"] for x in self.cryptomattes]", "def _get_primary_cn(tls_cert):\n return cert_parser.get_host_names(tls_cert)['cn']", "def names(self) -> Sequence[str]:\n return pulumi.get(self, \"names\")", "def names(self) -> Sequence[str]:\n return pulumi.get(self, \"names\")", "def extract_names(collection):\n return map(repr, collection)", "def get_subjects_urls(self, subjects: Iterable[Subject]) -> List[str]:\n self.logger.debug('Finding subjects urls.')\n all_rows = self.browser.find_elements(*MaterialLocators.SUBJECT_ROW)\n subjects = {(s.name.strip('. '), s.teacher.strip('. ')) for s in subjects}\n subjects_urls = []\n for subject in all_rows:\n name: str = subject.find_element(*MaterialLocators.SUBJECT_NAME).text\n teacher: str = subject.find_element(*MaterialLocators.SUBJECT_TEACHER).text\n if (name.strip('. '), teacher.strip('. 
')) in subjects:\n url = subject.find_element(*MaterialLocators.SUBJECT_NAME).get_attribute('href')\n subjects_urls.append(url)\n\n self.logger.debug(f'Found subjects urls for {len(subjects_urls)}/{len(subjects)}.')\n return subjects_urls", "def getIdentifiers(self):\n if self._messier == \"\":\n messier = None\n else:\n messier = \"M\" + self._messier\n\n if self._ngc == \"\":\n ngc = None\n else:\n ngc = list(map(str.strip, self._ngc.split(\",\")))\n ngc = list(map(lambda number: \"NGC\" + number, ngc))\n\n if self._ic == \"\":\n ic = None\n else:\n ic = list(map(str.strip, self._ic.split(\",\")))\n ic = list(map(lambda number: \"IC\" + number, ic))\n\n if self._commonnames == \"\":\n commonNames = None\n else:\n commonNames = list(map(str.strip, self._commonnames.split(\",\")))\n\n if self._identifiers == \"\":\n other = None\n else:\n other = list(map(str.strip, self._identifiers.split(\",\")))\n\n return messier, ngc, ic, commonNames, other", "def get_all_names(self) -> (dict, dict):\n return self.get_ref_names(), self.get_other_names()", "def get_names(self):\r\n names = []\r\n for p in self.people:\r\n names.append(p.get_name())\r\n return names", "def __unicode__(self):\n return self.subject", "def subject(self):\n if \"subject\" in self._prop_dict:\n return self._prop_dict[\"subject\"]\n else:\n return None", "def alternative_titles(self, **kwargs):\n\n path = self._get_movie_id_path('alternative_titles')\n resp = self._get_method(path, kwargs)\n return resp", "def get_from_subject(mesid, mailbox):\n res, data = mailbox.fetch(mesid, 'BODY.PEEK[HEADER.FIELDS (SUBJECT FROM)]')\n if res != 'OK':\n raise RuntimeError('error in fetch call for {}'.format(mesid))\n # Apparently default character set for IMAP is UTF7\n myheads = data[0][1].decode('utf-7')\n name = get_from(myheads)\n\n subject = findall(r'Subject:\\s+(.*)\\r\\n', myheads)[0] # Assume match\n return ' '.join((name, ':', subject))", "def names(self):\n return list(item.name for item in self.mechanisms)", "def get_transcript(self):\n\n rna = \"\"\n for i in range(len(self.__sequentie)):\n if self.__sequentie[i] == \"A\":\n rna += \"U\"\n if self.__sequentie[i] == \"T\":\n rna += \"A\"\n if self.__sequentie[i] == \"C\":\n rna += \"G\"\n if self.__sequentie[i] == \"G\":\n rna += \"C\"\n if self.__sequentie[i] == \"N\":\n rna += \"N\"\n return rna", "def nameList(self):\r\n return [self.name.lower(), self.code] + self._otherNames" ]
[ "0.7809142", "0.72061706", "0.70564437", "0.6959005", "0.6878344", "0.6363056", "0.61469597", "0.5626064", "0.5579359", "0.5547272", "0.548135", "0.5419874", "0.53881466", "0.53838485", "0.5289228", "0.5285456", "0.5278819", "0.5261047", "0.5261047", "0.52362436", "0.5235821", "0.5226654", "0.52254504", "0.51880354", "0.5162927", "0.5132712", "0.51277834", "0.5101818", "0.5082812", "0.5057859", "0.5039431", "0.5039431", "0.5028723", "0.50152165", "0.50152165", "0.49986133", "0.49825773", "0.49799162", "0.49502337", "0.49427563", "0.49406308", "0.4939637", "0.49329087", "0.49328104", "0.4930635", "0.4920324", "0.49181893", "0.49173924", "0.49136585", "0.49082556", "0.49008733", "0.49006915", "0.48957944", "0.48889065", "0.48819444", "0.48763126", "0.486495", "0.48540467", "0.48516122", "0.48433316", "0.48370543", "0.48351663", "0.48173234", "0.47914776", "0.4768973", "0.47609466", "0.47519863", "0.47418877", "0.47393212", "0.47360572", "0.47332394", "0.4726718", "0.4719645", "0.47018623", "0.46975088", "0.4690814", "0.46898413", "0.4684754", "0.46838534", "0.46783426", "0.46761763", "0.46744344", "0.4668039", "0.4657993", "0.4657822", "0.46488747", "0.46462977", "0.4644535", "0.4644535", "0.4638168", "0.46353284", "0.46307972", "0.46281877", "0.4625644", "0.46249798", "0.46238494", "0.46142775", "0.4613218", "0.46103245", "0.46094087", "0.46076155" ]
0.0
-1
Return ocsp_urls as GeneralNames
def get_ocsp_gnames(self):
    urls = ['uri:' + u for u in self.ocsp_urls]
    return self.load_gnames(urls)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_issuer_urls_gnames(self):\n urls = ['uri:' + u for u in self.issuer_urls]\n return self.load_gnames(urls)", "def urls(self) -> list[str]:\r\n ...", "def getURLs():", "def list_urls(self, prefix: str = \"\", etl_name: str = None) -> Iterable[str]:", "def get_crl_gnames(self):\n urls = ['uri:' + u for u in self.crl_urls]\n return self.load_gnames(urls)", "def inshorts_urls():\n\n url1 = 'https://inshorts.com/en/read/business'\n url2 = 'https://inshorts.com/en/read/sports'\n url3 = 'https://inshorts.com/en/read/technology'\n url4 = 'https://inshorts.com/en/read/entertainment'\n return [url1, url2, url3, url4]", "def _get_kdl_link_names(self):\n num_links = self._urdf_chain.getNrOfSegments()\n link_names = []\n for i in range(num_links):\n link_names.append(self._urdf_chain.getSegment(i).getName())\n return copy.deepcopy(link_names)", "def reponames(gh, user):\n return [u.split('/')[-1] for u in urls(gh, user)]", "def user_name_urls(self):\n raise NotImplementedError", "def get_urls(self):\r\n if self.mod.filename:\r\n return [x + self.mod.filename for x in self.mod.service.get_mirrors()]", "def list_ou(self, _):\n cn_re = re_compile(\"{[^}]+}\")\n results = self.engine.query(self.engine.GPO_INFO_FILTER(), [\"cn\", \"displayName\"])\n gpos = {}\n for gpo in results:\n gpos[gpo[\"cn\"]] = gpo[\"displayName\"]\n\n results = self.engine.query(self.engine.OU_FILTER())\n for result in results:\n print(result[\"distinguishedName\"])\n if \"gPLink\" in result:\n guids = cn_re.findall(result[\"gPLink\"])\n if len(guids) > 0:\n print(\"[gPLink]\")\n print(\"* {}\".format(\"\\n* \".join([gpos[g] if g in gpos else g for g in guids])))", "def url_name(request):\n url_name = False\n if request.resolver_match:\n url_name = request.resolver_match.url_name\n return {\"url_name\": url_name}", "def get_urls():\r\n return []", "def getOrtURLs(self, results):\n pcat = self.portal_catalog\n newresults = []\n for i in results:\n raw_webcode = i.get('webcode')\n if isinstance(raw_webcode, float):\n webcode = str(int(raw_webcode))\n elif isinstance(raw_webcode, int):\n webcode = str(raw_webcode)\n else:\n webcode = raw_webcode\n brains = pcat(Webcode = webcode)\n if len(brains) == 1:\n i['orturl'] = brains[0].getURL()\n else:\n i['orturl'] = ''\n newresults.append(i)\n return newresults", "def Prolinks(promotion_label):\n return prolinks", "def get_urls():\n return (constants.UNREVIEWED.col_values(3) +\n constants.REVIEWED.col_values(3) +\n constants.LAST.col_values(3))", "def names(self) -> list[str]:", "def get_names_url(i):\n urls = list()\n with open('./urls/fall11_urls_train_'+str(i)+'.txt','r',encoding=\"Latin-1\") as f:\n for line in f:\n urls.append(line)\n urls = [url.strip('\\n') for url in urls]\n urls1 = [url.split('\\t')[1] for url in urls]\n names = [url.split('\\t')[0] for url in urls]\n return urls1,names", "def getExpandedLinks():", "def urls(self) -> str:\n return self._data['urls']", "def urls(self):\n days = ['sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat', ]\n url = 'http://www2.nngov.com/newport-news/offenses/%stxt.htm'\n return [url % day for day in days]", "def lookups(self, request, model_admin):\n return (\n ('ASSETS', _('ASSETS')),\n ('CHI', _('CHI')),\n ('IMWUT', _('IMWUT')),\n ('TACCESS', _('TACCESS')),\n ('TEI', _('TEI')),\n ('UIST', _('UIST')),\n ('UbiComp', _('UbiComp'))\n )", "def registered_urls(self):\n from pkg_resources import iter_entry_points\n\n entries = ['Priority', 'EP Name', 'Module', 'Class']\n for ep in iter_entry_points('appurl.urls'):\n c = ep.load()\n 
entries.append([c.match_priority, ep.name, ep.module_name, c.__name__, ])\n\n return entries", "def get_custom_short_paths(content):", "def link_name_list(self):\n return list(self._link_reg.keys())", "def ocsp_url(self):\n\n if self._authority_information_access is None:\n return None\n\n for ad in self._authority_information_access:\n if ad['access_method'].native == 'ocsp' and ad['access_location'].name == 'uniform_resource_identifier':\n return ad['access_location'].chosen.native\n\n return None", "def gen_url(section):\n urls = []\n urls.append('https://ia800500.us.archive.org/22/items/stackexchange/' + section + '.stackexchange.com.7z')\n urls.append('https://ia800500.us.archive.org/22/items/stackexchange/' + section + '.7z')\n return urls", "def list_domain_names():\n pass", "def get_school_name_urls():\n\tschools_tree = get_tree(\"http://www.gla.ac.uk/schools/\")\n\tns = 'http://exslt.org/regular-expressions'\n\tpath = '//div[@class=\"row standardContent\"]//a[re:match(@href, \"schools/[A-Za-z]+/\")]'\n\t# Get all the <a> elements on the page which link to a school page\n\ta_elems = schools_tree.xpath(path, namespaces={'re':ns})\n\tbase_url = \"http://www.gla.ac.uk\"\n\turls = []\n\tnames = []\n\n\tfor a in a_elems:\n\t\t# make school staff page url\n\t\tstaff_page_url = base_url + a.get(\"href\") + \"staff/\"\n\t\turls.append(staff_page_url)\n\t\t# get name of school\n\t\tschool_name = a.text\n\t\tnames.append(school_name)\n\n\t# create list of tuples\n\tschool_names_urls = zip(names, urls)\n\treturn school_names_urls", "def getNames(self) -> List[unicode]:\n ...", "def lookups(self, request, model_admin):\n return (\n ('rien', 'ni rapproché ni pointé'),\n ('p', 'pointé uniquement'),\n ('nrapp', 'non-rapproché'),\n ('rapp', 'rapproché uniquement'),\n ('pr', 'pointé ou rapproché')\n )", "def names(self):\r\n return resource.Name(self)", "def Url(self) -> str:", "def psv_name_list(self):\n return list(self._link_reg.psv_names)", "def extract_names(collection):\n return (\n '[{}]'.format(', '.join(map(repr, groups[n].entity_names)))\n if n in groups else repr(n) for n in collection\n )", "def get_radiobrowser_base_urls():\n hosts = []\n # get all hosts from DNS\n ips = socket.getaddrinfo('all.api.radio-browser.info',\n 80, 0, 0, socket.IPPROTO_TCP)\n for ip_tupple in ips:\n ip = ip_tupple[4][0]\n\n # do a reverse lookup on every one of the ips to have a nice name for it\n host_addr = socket.gethostbyaddr(ip)\n # add the name to a list if not already in there\n if host_addr[0] not in hosts:\n hosts.append(host_addr[0])\n\n # sort list of names\n hosts.sort()\n # add \"https://\" in front to make it an url\n return list(map(lambda x: \"https://\" + x, hosts))", "def simplify_links(proj,exp,links):\n simple_links =[] \n\n for key in links:\n (node_name,x,y) = key.rpartition(':')\n node_name = node_name+\".\"+exp+\".\"+proj+\".emulab.net\"\n simple_links.append((node_name,links[key]['ipaddr']))\n\n return simple_links", "def extract_names(collection):\n return map(repr, collection)", "def get_input_domains():\n df = pandas.read_excel(\"AutoScrapy/files/EBE21 - Top 100 Onlineshops to scrapp.ods\", engine=\"odf\")\n list_of_addresses = df['Domain'].to_list()\n list_of_addresses = [(\"http://\" + address) for address in list_of_addresses]\n print(list_of_addresses)\n return list_of_addresses", "def getcongressURLs():\n\n\tdates = getdates()\n\n\tf = open('congressional_records_URLs2.csv', 'w')\n\ttry:\n\t\tfor dt in range(0, len(dates)):\n\t\t\tdate = str(dates[dt]).replace(\"'\", 
\"\").replace(\",\", \"-\").replace(\" \", \"\").replace(\"(\", \"\").replace(\")\", \"\")\n\t\t\tfull_url = getfullURL(date)\n\t\t\tf.write(u'%s\\n' % (full_url))\n\tfinally:\n\t\tf.close()", "def osnc(pl, refnd):\n return [refnd[i] for i in pl]", "def sqs_urls(self) -> Sequence[str]:\n return pulumi.get(self, \"sqs_urls\")", "def extract_names_URL(inputURL, source=\"gnrd\", sEngine=0):\r\n #service execution time\r\n start_time = time.time()\r\n if source == \"gnrd\":\r\n final_result = get_sn_url(inputURL, sEngine)\r\n elif source == \"taxonfinder\":\r\n final_result = get_tf_sn_url(inputURL)\r\n else:\r\n return {'status_code': 400, 'message': \"Error: Invalid source name\"} \r\n\r\n end_time = time.time()\r\n execution_time = end_time-start_time\r\n\r\n #service result creation time\r\n creation_time = datetime.datetime.now().isoformat()\r\n\r\n meta_data = {'creation_time': creation_time, 'execution_time': float(\"{:4.2f}\".format(execution_time)), 'source_urls': [\"http://gnrd.globalnames.org/\"] }\r\n\r\n final_result['meta_data'] = meta_data\r\n final_result['total_names'] = len(final_result['scientificNames'])\r\n\r\n return json.dumps(final_result)", "def names(self) -> List:\n ...", "def debug_urls(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:", "def AuthorURLs(entry):\n a_URLs = ''\n for a in entry.getAuthors():\n url = a.get('homepage', ' ')\n a_URLs += \"%s and \" % url\n return a_URLs[:-5]", "def links(self) -> str:\n return pulumi.get(self, \"links\")", "def names(self):\n\t\treturn", "def get_urls(db):\n return db.meta.find_one({'name':\"urls\"})['urls']", "def get_urls(self):\n \n url_strings = dict()\n \n \n for platform in constants.PLATFORMS:\n download_path = reverse('download-installer', kwargs={\n 'build_id': self.build_id,\n 'platform': platform,\n })\n \n url_strings[platform] = settings.BASE_URL.rstrip('/') + download_path\n \n \n return url_strings", "def urls(self) -> Dict[str, str]:\n url_bases = self.url_bases\n unformatted_paths = self._url_module.url_paths\n\n urls = {}\n for url_base in url_bases:\n # The default URL_base will look like: http://service.[..].amazonaws.com/...\n # This extension ensures support for the China & ISO regions\n alt_dns_suffixes = {\"cn\": \"amazonaws.com.cn\"}\n if enable_iso_regions():\n alt_dns_suffixes.update(\n {\n \"iso\": \"c2s.ic.gov\",\n \"isob\": \"sc2s.sgov.gov\",\n \"isoe\": \"cloud.adc-e.uk\",\n \"isof\": \"csp.hci.ic.gov\",\n }\n )\n\n for url_path, handler in unformatted_paths.items():\n url = url_path.format(url_base)\n urls[url] = handler\n for dns_suffix in alt_dns_suffixes.values():\n alt_url_base = re.sub(r\"amazonaws\\\\?.com$\", dns_suffix, url_base)\n alt_url = url_path.format(alt_url_base)\n urls[alt_url] = handler\n\n return urls", "def URLs(self, default=[{}]):\n tmp = self.data.get('metadata', {}).get('urls', default)\n return [HEP.URLObject(i) for i in tmp]", "def get_names_short(self):\r\n return [p.get_name() for p in self.people]", "def result2name(self, result: _Result) -> str:\n parsed_uri = urllib.parse.urlparse(result.uri)\n\n # define (categories of) aggregated sources and split them accordingly:\n if parsed_uri.netloc in [\"bartoc-skosmos.unibas.ch\", \"data.ub.uio.no\", \"vocab.getty.edu\"]:\n return self.uri2name(parsed_uri, n=1)\n elif parsed_uri.netloc in [\"isl.ics.forth.gr\", \"linkeddata.ge.imati.cnr.it\", \"www.yso.fi\"]:\n return self.uri2name(parsed_uri, n=2)\n elif parsed_uri.netloc in [\"vocabs.ands.org.au\"]:\n return 
self.uri2name(parsed_uri, n=5)\n else:\n return parsed_uri.netloc", "def url(result):\n return result.entities.get(u'urls')", "def get_infobox_urls(mapping_page):\n pattern = re.compile('index\\.php/Mapping_en:Infobox_[-\\w\\./]+')\n return pattern.findall(mapping_page)", "def get_cora_label_names():\n # type: () -> List[str]\n return _label_names", "def get_short_names(self) -> List[str]:\n result = []\n for elements in self._get_results_list():\n result.append(elements[0])\n return result", "def get_docs_urls(self):\n docs_urls = []\n link_labels = []\n for tag in self.post_div.find_all(\"a\"):\n url = tag[\"href\"]\n if url.startswith(\"https://docs.google.com\") or \\\n url.startswith(\"https://drive.google.com\"):\n docs_urls += [url]\n link_labels += [tag.text]\n return docs_urls, link_labels", "def list(self):\n\t\treturn self.link_words", "def obtain_series_links(series_names):\n links = []\n for product in series_names[0]:\n product = product.lower()\n splitted = product.split()\n product = \"-\".join(splitted)\n series_link = \"https://cryptoslam.io/\" + product + \"/mints\" \n links.append((product,series_link))\n return links", "def getAliases(self):", "def url(self):\n if self.term_type != 'C':\n url_fmt = self.path_level_url_fmt\n url_info = {'id': self.term_type}\n else:\n url_fmt = self.obj_level_url_fmt\n url_info = {'org_prefix': self.org_prefix, 'id': self.term_id}\n\n return url_fmt % url_info", "def getSiteExampleURLs(self):\r\n return 'no such example'", "def get_url(url_index: str) -> list:\n url = get_url_category(url_index)\n return url", "def usage(self):\n names = self.sources.keys()\n return sorted([(n.replace('__', '.'), self._resolve(n)) for n in names],\n key=lambda el: el[0])", "def name(self) -> 'Literal[\"Dynamic Reverse Address Resolution Protocol\", \"Inverse Address Resolution Protocol\", \"Reverse Address Resolution Protocol\", \"Address Resolution Protocol\"]': # pylint: disable=line-too-long\n return self._name", "def getUrls(self):\n # in case you need to move from a read only Url to a writeable one, here it gets replaced\n repopath = self.repositoryUrl().replace(\"[git]\", \"\")\n repoString = utils.replaceVCSUrl(repopath)\n [repoUrl, repoBranch, repoTag] = utils.splitVCSUrl(repoString)\n if not repoBranch and not repoTag:\n repoBranch = \"master\"\n print(\"|\".join([repoUrl, repoBranch, repoTag]))\n return True", "def get_names(self):\n\n # log.debug(str(inspect.stack()[1][3]) + \" --> OC.get_names()\")\n return [x.options['name'] for x in self.get_list()]", "def getUrl(self): #$NON-NLS-1$\r", "def getUrl(self): #$NON-NLS-1$\r", "def _findSupplUrls(self, landPage):\n urlParts = ['/suppdata/']\n for urlPart in urlParts:\n suppUrls = findLinksWithUrlPart(landPage, urlPart)\n if len(suppUrls) > 0:\n return suppUrls\n\n return []", "def mapping_names(self):\n return [self.basename]", "def orca_list():\n val = []\n val.append('orca')\n val.append('orca-b3lyp')\n return val", "def getOqiNames( self ):\n\n if self.oqiNames:\n return self.oqiNames.keys()\n\n n = self.adb.get( \"nSrss\" )\n for indx in xrange( n ):\n name = self.adb.get( \"srsName\", indx )\n self.oqiNames[ name ] = indx\n\n return self.oqiNames.keys()", "def urlpatterns(self):\n regex = r'^%s/' % self.label\n urls_module = '%s.urls' % self.name\n ns = self.label\n return [url(regex, include(urls_module, namespace=ns, app_name=ns))]", "def get_urls(self):\n return patterns('')", "def copyurls(door):\n return {name: Url(url.path) for name, url in door.urls.items()}", "def 
GetResourceNames(self):\r\n return [x.name for x in self.resources]", "def names(cls) -> List[str]:", "def lookups(self, request: HttpRequest, model_admin:\n Type[admin.ModelAdmin]) -> List[Tuple[str, str]]:\n return [\n ('superuser', 'Superuser'),\n ('staff', 'Staff'),\n ('scanlator', 'Scanlator'),\n ('regular', 'Regular')\n ]", "def urls(self):\r\n urls = []\r\n\r\n for url_name in sorted(self.resources.keys()):\r\n\r\n resource = self.resources[url_name]\r\n urls.append(resource.as_url(\r\n api=self,\r\n name_prefix='-'.join(\r\n (self.prefix, self.str_version)).strip('-'),\r\n url_prefix=self.str_version\r\n ))\r\n\r\n return patterns(self.prefix, *urls)", "def service_urls(records, service='odp:url'):\n service_string = 'urn:x-esri:specification:ServiceType:' + service\n urls = []\n for key, rec in records.items():\n # Create a generator object, and iterate through it until the match is\n # found if not found, gets the default value (here \"none\").\n url = next((d['url'] for d in rec.references if\n d['scheme'] == service_string), None)\n if url is not None:\n urls.append(url)\n return urls", "def test_splits_urls_for_nouns(self):\r\n test_value = \"http://google.com/drives/autonomous/cars\"\r\n self.assertEqual(\r\n set([u'cars', u'autonomous']),\r\n suggest_tags(test_value))", "def uri(self) -> list:\n raise NotImplementedError(\"ErddapArgoDataFetcher.uri not implemented\")", "def URLs(self, default=[{}]):\n tmp = self.data.get('urls', default)\n return [HEP.URLObject(i) for i in tmp]", "def items(self):\n return self.namespace_to_alias.items()", "def build_end_url_list(url):\n http_types = [\"http://\", \"https://\"]\n dub_types = [\"www.\", \"\"] # this order needs to preserved for testing at www.hgdatascience.com\n http_dub_urls = [\"{}{}{}\".format(h_type, dub_type, url) for dub_type in dub_types for h_type in http_types]\n return http_dub_urls", "def _reverse_urls(names, course):\r\n return [reverse(name, kwargs={'course_id': course.id.to_deprecated_string()})\r\n for name in names]", "def __get_names(record: TNSRecord) -> Dict[str, str]:\n aliases = {'iau': record.name}\n internal_names = record.internal_names.split(',')\n for provider, pattern in Object.name_patterns.items():\n for name in internal_names:\n if pattern.match(name):\n aliases[provider] = name\n return aliases", "def protocol_names(self):\n l = self.protocols()\n retval = [str(k.name) for k in l]\n return retval", "def __str__(self):\n return gettext('List of %s') % self.resource.__name__", "def get_sp_list():\n bs = get_soup('https://en.wikipedia.org/wiki/List_of_S%26P_500_companies')\n sp_companies = bs.find_all('a', class_=\"external text\")\n return sp_companies", "def names():\n pass", "def url_bases(self) -> List[str]:\n return self._url_module.url_bases", "def getAdditionnalsUrls(self, ip_version):\n urls = []\n url = self.cp.get(self.CORE_SECTION, 'url_v'+str(ip_version), fallback='')\n for u in filter(lambda s: len(s), map(lambda x: x.strip(), url.split(','))):\n urls.append(u)\n return urls", "def test_get_pci_link_list(self):\n pass", "def svn_info_t_URL_get(svn_info_t_self): # real signature unknown; restored from __doc__\n return \"\"", "def url_name(cls):\n return f'{cls.app_label}_{cls.name}'", "def url_list(path):\n match = re.match(r'^.*(/wa/[A-Za-z0-9/-]+)([A-Za-z-]+)/([0-9]+/)?$', path)\n return u'%s%s%s/' % (match.group(1), match.group(2), \n settings.ACTION_ADMIN_LIST_SUFFIX)" ]
[ "0.6319815", "0.6266063", "0.6263989", "0.62039906", "0.60305554", "0.5934846", "0.5844959", "0.583849", "0.58368385", "0.56617916", "0.5622121", "0.5613717", "0.55928856", "0.5585941", "0.55812305", "0.5537399", "0.5493335", "0.5492435", "0.5488834", "0.54792345", "0.54780656", "0.5447123", "0.544228", "0.5426493", "0.5420507", "0.53942716", "0.53900534", "0.5373059", "0.5310811", "0.5306697", "0.5306003", "0.53031456", "0.52980924", "0.5295199", "0.5280255", "0.52774", "0.52745986", "0.52701473", "0.5260482", "0.525634", "0.5249313", "0.524778", "0.52247876", "0.5215761", "0.5198733", "0.51951545", "0.5194517", "0.519423", "0.51879317", "0.51773065", "0.5175462", "0.5167245", "0.5166999", "0.51593226", "0.51539415", "0.5151765", "0.51508045", "0.51480585", "0.5147623", "0.5146885", "0.51437545", "0.5142334", "0.51328987", "0.5128336", "0.5127734", "0.5122561", "0.512246", "0.51209843", "0.5117414", "0.51151335", "0.51151335", "0.5112798", "0.5103389", "0.5103117", "0.51021135", "0.5092243", "0.50909996", "0.5084007", "0.5081182", "0.5068422", "0.5067831", "0.50673366", "0.5063238", "0.50618786", "0.5061333", "0.5058992", "0.5058481", "0.50547063", "0.5052903", "0.5042704", "0.50417453", "0.5041396", "0.50359", "0.5035594", "0.5033372", "0.5030462", "0.50283426", "0.5023916", "0.50229657", "0.50196296" ]
0.72922915
0
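To make the row above concrete, here is a minimal runnable sketch of how the 'uri:'-prefixed strings produced by get_ocsp_gnames can become GeneralName objects. It is modeled on the load_gnames snippet that appears among the negatives of the next row; the InfoSketch holder class and the reduced, uri-only parsing branch are assumptions for illustration, not a verified public API.

# Sketch (assumptions): mirrors the get_ocsp_gnames / load_gnames pair
# visible in this dump; InfoSketch is a hypothetical holder class.
from cryptography import x509

class InfoSketch:
    def __init__(self, ocsp_urls):
        self.ocsp_urls = ocsp_urls  # plain URL strings

    def get_ocsp_gnames(self):
        # Prefix each URL so the loader knows which GeneralName type to build.
        urls = ['uri:' + u for u in self.ocsp_urls]
        return self.load_gnames(urls)

    def load_gnames(self, gname_list):
        # Reduced version of the load_gnames negative shown below:
        # only the 'uri:' branch is needed for OCSP URLs.
        gnames = []
        for alt in gname_list:
            t, val = alt.split(':', 1)
            if t.strip().lower() != 'uri':
                raise ValueError('unsupported gname type: ' + t)
            gnames.append(x509.UniformResourceIdentifier(val.strip()))
        return gnames

info = InfoSketch(['http://ocsp.example.com'])
print(info.get_ocsp_gnames())  # [<UniformResourceIdentifier(...)>]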
Return issuer_urls as GeneralNames
def get_issuer_urls_gnames(self):
    urls = ['uri:' + u for u in self.issuer_urls]
    return self.load_gnames(urls)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def subject_alt_uris(self):\n\n return self._get_subject_alt('uniform_resource_identifier')", "def subject_alt_domains(self):\n\n return self._get_subject_alt('dns_name')", "def get_crl_gnames(self):\n urls = ['uri:' + u for u in self.crl_urls]\n return self.load_gnames(urls)", "def extract_gnames(self, ext):\n res = []\n for gn in ext:\n if isinstance(gn, x509.RFC822Name):\n res.append('email:' + as_unicode(gn.value))\n elif isinstance(gn, x509.DNSName):\n res.append('dns:' + as_unicode(gn.value))\n elif isinstance(gn, x509.UniformResourceIdentifier):\n res.append('uri:' + as_unicode(gn.value))\n elif isinstance(gn, x509.IPAddress):\n res.append('ip:' + str(gn.value))\n elif isinstance(gn, x509.DirectoryName):\n val = self.extract_name(gn.value)\n res.append('dn:' + render_name(val))\n else:\n raise InvalidCertificate(\"Unsupported subjectAltName type: %s\" % (gn,))\n return res", "def _get_subject_alt(self, name):\n\n if self._subject_alt_name is None:\n return []\n\n output = []\n for general_name in self._subject_alt_name:\n if general_name.name == name:\n output.append(general_name.native)\n return output", "def issuer(self) -> str:\n return self._issuer", "def _get_kdl_link_names(self):\n num_links = self._urdf_chain.getNrOfSegments()\n link_names = []\n for i in range(num_links):\n link_names.append(self._urdf_chain.getSegment(i).getName())\n return copy.deepcopy(link_names)", "def user_name_urls(self):\n raise NotImplementedError", "def issuer(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"issuer\")", "def issuer_alt_name_value(self):\n\n if self._processed_extensions is False:\n self._set_extensions()\n return self._issuer_alt_name_value", "def ns_list(self):\n return sorted(self.get_ns_name(ns) for ns in self.profile.authoritative_servers)", "def list_urls(self, prefix: str = \"\", etl_name: str = None) -> Iterable[str]:", "def xforwardedforclientcert_issuerdnalias(self) -> str:\n return pulumi.get(self, \"xforwardedforclientcert_issuerdnalias\")", "def AuthorURLs(entry):\n a_URLs = ''\n for a in entry.getAuthors():\n url = a.get('homepage', ' ')\n a_URLs += \"%s and \" % url\n return a_URLs[:-5]", "def getHosterIssns(publisherName):\n global publisherIssns\n global publisherUrls\n if publisherIssns is None:\n journalFname = pubConf.journalTable\n if not isfile(journalFname):\n logging.warn('%s does not exist, cannot ISSN-assign highwire crawler' % journalFname)\n return ({}, set([]))\n publisherIssns = defaultdict(dict)\n publisherUrls = defaultdict(set)\n logging.log(5, 'Parsing %s to get highwire ISSNs' % journalFname)\n for row in maxCommon.iterTsvRows(journalFname):\n if row.source in ('HIGHWIRE', 'WILEY'):\n hoster = row.source\n journalUrl = 'http://' + row.urls.strip().replace('http://', '')\n issn = row.pIssn.strip()\n eIssn = row.eIssn.strip()\n publisherIssns[hoster][issn] = journalUrl\n publisherIssns[hoster][eIssn] = journalUrl\n if journalUrl != '':\n publisherUrls[hoster].add(journalUrl)\n\n return (publisherIssns[publisherName], publisherUrls[publisherName])", "def urls(self) -> list[str]:\r\n ...", "def subject_alt_emails(self):\n\n return self._get_subject_alt('rfc822_name')", "def getURLs():", "def test_uri(self):\n rv = extract_ids(X509_OTHER_NAME)\n assert [\n URIPattern(b\"http://example.com/\")\n ] == [id for id in rv if isinstance(id, URIPattern)]", "def reponames(gh, user):\n return [u.split('/')[-1] for u in urls(gh, user)]", "def get_issuer(site_url=None, request=None):\n site_url = get_site_url(site_url=site_url, request=request)\n path = 
reverse('oidc_provider:provider-info') \\\n .split('/.well-known/openid-configuration')[0]\n issuer = site_url + path\n\n return str(issuer)", "def issuer_ref(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"issuer_ref\")", "def list_domain_names():\n pass", "def issuer_uri(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"issuer_uri\")", "def test_cn_ids_are_used_as_fallback(self):\n with pytest.warns(SubjectAltNameWarning):\n rv = extract_ids(X509_CN_ONLY)\n assert [\n DNSPattern(b\"www.microsoft.com\")\n ] == rv", "def link_name_list(self):\n return list(self._link_reg.keys())", "def get_radiobrowser_base_urls():\n hosts = []\n # get all hosts from DNS\n ips = socket.getaddrinfo('all.api.radio-browser.info',\n 80, 0, 0, socket.IPPROTO_TCP)\n for ip_tupple in ips:\n ip = ip_tupple[4][0]\n\n # do a reverse lookup on every one of the ips to have a nice name for it\n host_addr = socket.gethostbyaddr(ip)\n # add the name to a list if not already in there\n if host_addr[0] not in hosts:\n hosts.append(host_addr[0])\n\n # sort list of names\n hosts.sort()\n # add \"https://\" in front to make it an url\n return list(map(lambda x: \"https://\" + x, hosts))", "def get_layer_urls(self):\n urls = []\n\n if getattr(self, 'additional_domains'):\n map(urls.append, (domain for domain in self.additional_domains.split(\";\") if domain))\n\n return urls", "def list_domain_names(self) -> Dict:\n pass", "def iterNamespaceURIs(self):\n return iter(self.namespace_to_alias)", "def get_subjects_urls(self, subjects: Iterable[Subject]) -> List[str]:\n self.logger.debug('Finding subjects urls.')\n all_rows = self.browser.find_elements(*MaterialLocators.SUBJECT_ROW)\n subjects = {(s.name.strip('. '), s.teacher.strip('. ')) for s in subjects}\n subjects_urls = []\n for subject in all_rows:\n name: str = subject.find_element(*MaterialLocators.SUBJECT_NAME).text\n teacher: str = subject.find_element(*MaterialLocators.SUBJECT_TEACHER).text\n if (name.strip('. '), teacher.strip('. 
')) in subjects:\n url = subject.find_element(*MaterialLocators.SUBJECT_NAME).get_attribute('href')\n subjects_urls.append(url)\n\n self.logger.debug(f'Found subjects urls for {len(subjects_urls)}/{len(subjects)}.')\n return subjects_urls", "def name_servers(self) -> Sequence[str]:\n return pulumi.get(self, \"name_servers\")", "def issuer_did(self) -> str:\n return self._issuer_did", "def URLs(self, default=[{}]):\n tmp = self.data.get('metadata', {}).get('urls', default)\n return [HEP.URLObject(i) for i in tmp]", "def x_forwarded_for_client_cert_issuer_dn_alias(self) -> Optional[str]:\n return pulumi.get(self, \"x_forwarded_for_client_cert_issuer_dn_alias\")", "def encode_san_dns_names(self, san):\n dns_names = []\n for dns_name in san:\n dns_names.append(x509.DNSName(dns_name))\n return dns_names", "def __get_names(record: TNSRecord) -> Dict[str, str]:\n aliases = {'iau': record.name}\n internal_names = record.internal_names.split(',')\n for provider, pattern in Object.name_patterns.items():\n for name in internal_names:\n if pattern.match(name):\n aliases[provider] = name\n return aliases", "def build_end_url_list(url):\n http_types = [\"http://\", \"https://\"]\n dub_types = [\"www.\", \"\"] # this order needs to preserved for testing at www.hgdatascience.com\n http_dub_urls = [\"{}{}{}\".format(h_type, dub_type, url) for dub_type in dub_types for h_type in http_types]\n return http_dub_urls", "def inshorts_urls():\n\n url1 = 'https://inshorts.com/en/read/business'\n url2 = 'https://inshorts.com/en/read/sports'\n url3 = 'https://inshorts.com/en/read/technology'\n url4 = 'https://inshorts.com/en/read/entertainment'\n return [url1, url2, url3, url4]", "def issuer(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"issuer\")", "def protocol_names(self):\n l = self.protocols()\n retval = [str(k.name) for k in l]\n return retval", "def get_school_name_urls():\n\tschools_tree = get_tree(\"http://www.gla.ac.uk/schools/\")\n\tns = 'http://exslt.org/regular-expressions'\n\tpath = '//div[@class=\"row standardContent\"]//a[re:match(@href, \"schools/[A-Za-z]+/\")]'\n\t# Get all the <a> elements on the page which link to a school page\n\ta_elems = schools_tree.xpath(path, namespaces={'re':ns})\n\tbase_url = \"http://www.gla.ac.uk\"\n\turls = []\n\tnames = []\n\n\tfor a in a_elems:\n\t\t# make school staff page url\n\t\tstaff_page_url = base_url + a.get(\"href\") + \"staff/\"\n\t\turls.append(staff_page_url)\n\t\t# get name of school\n\t\tschool_name = a.text\n\t\tnames.append(school_name)\n\n\t# create list of tuples\n\tschool_names_urls = zip(names, urls)\n\treturn school_names_urls", "def get_urls():\r\n return []", "def identifier_uris(self) -> pulumi.Output[Optional[Sequence[str]]]:\n return pulumi.get(self, \"identifier_uris\")", "def ssl_commonname(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"ssl_commonname\")", "def allowed_uri_sans(self) -> pulumi.Output[Optional[Sequence[str]]]:\n return pulumi.get(self, \"allowed_uri_sans\")", "def AlternativeNames(self, default=[None]):\n return self.data.get('alternative_names', default)", "def hs_signers(self):\n return [{'name': u.get_full_name(), 'email': u.email} for u in [self.workspace.lawyer, self.user]]", "def get_author_name_urls(dept_name, dept_url):\n\t# Change to \"School of Humanities\" to match the name used in Enlighten\n\t# Done because the string obtained from http://www.gla.ac.uk/schools/ contains the Gaelic name as well\n\tif \"Humanities\" in dept_name:\n\t\tdept_name = \"School of Humanities\"\n\n\t# get list of 
names of researchers in department\n\tnames = get_names(dept_url)\n\n\twinning_name_urls = set()\n\n\t# loop through each name\n\tfor name in names:\n\t\tname = initialise_first_name(name)\n\t\t# Get Enlighten page on which author name will be found (page for the letter of author's last name)\n\t\tfull_url = author_list_base + \"index.\"+ name.split(\" \")[0][0] + \".html\"\n\t\ttree = get_tree(full_url)\n\t\t# Get all candidate authors which match the name\n\t\tname_urls = get_name_url_matches(name, tree)\n\t\t# If candidates were found\n\t\tif name_urls:\n\t\t\t# Filter out authors that have already been scraped\n\t\t\tname_urls = [name_url for name_url in name_urls if name_url not in winning_name_urls]\n\t\t\t# Get the first ranked (name, url) tuple for the target name from the remaining candidates\n\t\t\twinning_name_url = get_winning_url(name_urls, dept_name)\n\t\t\tif winning_name_url:\n\t\t\t\twinning_name_urls.add(winning_name_url)\n\n\treturn winning_name_urls", "def get_urls(inputfiles):\n urls = []\n scheme_rgx = re.compile(r'^https?://')\n for ifile in inputfiles:\n urls.append(ifile.read().splitlines())\n urls = set([n for l in urls for n in l])\n urls = list(filter(None, urls))\n for i in range(len(urls)):\n if not scheme_rgx.match(urls[i]):\n urls[i] = 'http://' + urls[i]\n return urls", "def names(self) -> list[str]:", "def URLs(self, default=[{}]):\n tmp = self.data.get('urls', default)\n return [HEP.URLObject(i) for i in tmp]", "def urls(self) -> Dict[str, str]:\n url_bases = self.url_bases\n unformatted_paths = self._url_module.url_paths\n\n urls = {}\n for url_base in url_bases:\n # The default URL_base will look like: http://service.[..].amazonaws.com/...\n # This extension ensures support for the China & ISO regions\n alt_dns_suffixes = {\"cn\": \"amazonaws.com.cn\"}\n if enable_iso_regions():\n alt_dns_suffixes.update(\n {\n \"iso\": \"c2s.ic.gov\",\n \"isob\": \"sc2s.sgov.gov\",\n \"isoe\": \"cloud.adc-e.uk\",\n \"isof\": \"csp.hci.ic.gov\",\n }\n )\n\n for url_path, handler in unformatted_paths.items():\n url = url_path.format(url_base)\n urls[url] = handler\n for dns_suffix in alt_dns_suffixes.values():\n alt_url_base = re.sub(r\"amazonaws\\\\?.com$\", dns_suffix, url_base)\n alt_url = url_path.format(alt_url_base)\n urls[alt_url] = handler\n\n return urls", "def get_urls(self):\r\n if self.mod.filename:\r\n return [x + self.mod.filename for x in self.mod.service.get_mirrors()]", "def issuer(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"issuer\")", "def test_badge_should_have_issuer(self):\n\n badge = self.get_sample_badge()\n self.assertIsInstance(badge.issuer, str)", "def named_entities(self) -> List[str]:", "def getWellKnownDict(self, url=None, issuer=None):\n url = url or self.parameters['issuer'] and '%s/.well-known/openid-configuration' % self.parameters['issuer']\n if not url:\n return S_ERROR('Cannot get %s provider issuer/wellKnow url' % self.parameters['name'])\n try:\n r = self.request('GET', url)\n r.raise_for_status()\n return S_OK(r.json())\n except (self.exceptions.RequestException, ValueError) as e:\n return S_ERROR(\"%s: %s\" % (e.message, r.text))", "def filenames(self):\n names = []\n for furi in np.asarray(self.fileuris).flat:\n names.append(furi)\n return names", "def load_gnames(self, gname_list):\n gnames = []\n for alt in gname_list:\n if ':' not in alt:\n raise InvalidCertificate(\"Invalid gname: %s\" % (alt,))\n t, val = alt.split(':', 1)\n t = t.lower().strip()\n val = val.strip()\n if t == 'dn':\n gn = 
x509.DirectoryName(self.load_name(parse_dn(val)))\n elif t == 'dns':\n gn = x509.DNSName(val)\n elif t == 'email':\n gn = x509.RFC822Name(val)\n elif t == 'uri':\n gn = x509.UniformResourceIdentifier(val)\n elif t == 'ip':\n if val.find(':') >= 0:\n gn = x509.IPAddress(ipaddress.IPv6Address(val))\n else:\n gn = x509.IPAddress(ipaddress.IPv4Address(val))\n elif t == 'dn':\n gn = x509.DirectoryName(self.load_name(parse_dn(val)))\n elif t == 'net':\n if val.find(':') >= 0:\n gn = x509.IPAddress(ipaddress.IPv6Network(val))\n else:\n gn = x509.IPAddress(ipaddress.IPv4Network(val))\n else:\n raise Exception('Invalid GeneralName: ' + alt)\n gnames.append(gn)\n return gnames", "def items(self):\n return self.namespace_to_alias.items()", "def getNames(self) -> List[unicode]:\n ...", "def get_csr_san(self, csr):\n dns_names = []\n try:\n san = csr.extensions.get_extension_for_class(\n x509.SubjectAlternativeName\n )\n except ExtensionNotFound:\n san = None\n if san:\n for dns_name in san.value:\n dns_names.append(dns_name.value)\n return dns_names", "def get_providers_list(prefix: str, identifier: str) -> Sequence[Tuple[str, str]]:\n rv = []\n for provider, get_url in PROVIDER_FUNCTIONS.items():\n link = get_url(prefix, identifier)\n if link is not None:\n rv.append((provider, link))\n if not rv:\n return rv\n\n bioregistry_link = _get_bioregistry_link(prefix, identifier)\n if not bioregistry_link:\n return rv\n\n # if a default URL is available, it goes first. otherwise the bioregistry URL goes first.\n rv.insert(1 if rv[0][0] == \"default\" else 0, (\"bioregistry\", bioregistry_link))\n return rv", "def getExpandedLinks():", "def get_names(url):\n\t# get html element tree\n\ttree = get_tree(url)\n\t# Names are text within <a> elements in this list\n\t# xpath returns a list with alternating last and first names as elements\n\t# Concatenate each last name and first name pair and put in new list as full name\n\tnames = tree.xpath('//*[@id=\"research-teachinglist\"]/li//a//text()')\n\tfull_names = []\n\tfor i in range(0, len(names)-1, 2):\n\t\tfull_names.append(names[i] + names[i+1])\n\n\treturn full_names", "def arns(self) -> Sequence[str]:\n return pulumi.get(self, \"arns\")", "def ssl_intercept(doc):\n\n urls = doc['browser']['urls']\n \n tmp = []\n for url in urls:\n tmp.append(url.split('/')[2])\n\n return list(set(tmp))", "def css_bundle_names(self):\n yield 'djblets-avatars-config'\n\n for service in self.avatar_service_registry.configurable_services:\n for bundle in service.config_form_class.css_bundle_names:\n yield bundle", "def GetResourceNames(self):\r\n return [x.name for x in self.resources]", "def relevant_domains(self):\n pass", "def getNames():\r\n return [\"Server1\", \"Server2\", \"Client1\", \"Client2\"]", "def extract_domains(self, resp):\n return", "def get_redirect_uris(\n domains: List[str], redirect_path_sign_in: str, redirect_path_sign_out: str\n) -> Dict[str, List[str]]:\n return {\n \"sign_in\": [f\"{domain}{redirect_path_sign_in}\" for domain in domains],\n \"sign_out\": [f\"{domain}{redirect_path_sign_out}\" for domain in domains],\n }", "def get_secondary_afferents_names(self):\n\t\treturn self._secondaryAfferentsNames", "def test_url_subdomain(self):\n subdomains = ct.url_subdomain(\"https://www.bad-actor.services/some/url-thats-long?debug=True\")\n assert isinstance(subdomains, list)\n assert len(subdomains) == 1\n subdomains = ct.url_subdomain(\"https://one.two.bad-actor.services/some/url-thats-long?debug=True\")\n assert subdomains[0] == \"one\"\n assert 
subdomains[1] == \"two\"", "def friendly_name(self):\n return \"ECDSA CERTIFICATE\"", "def _reverse_urls(names, course):\r\n return [reverse(name, kwargs={'course_id': course.id.to_deprecated_string()})\r\n for name in names]", "def get_names_url(i):\n urls = list()\n with open('./urls/fall11_urls_train_'+str(i)+'.txt','r',encoding=\"Latin-1\") as f:\n for line in f:\n urls.append(line)\n urls = [url.strip('\\n') for url in urls]\n urls1 = [url.split('\\t')[1] for url in urls]\n names = [url.split('\\t')[0] for url in urls]\n return urls1,names", "def hostnames(self) -> Sequence[str]:\n return pulumi.get(self, \"hostnames\")", "def get_init_all_names(self) -> list[str]:\n names = {self.client.name, self.client.alias_name}\n if self.service_resource:\n names.add(self.service_resource.name)\n names.add(self.service_resource.alias_name)\n for waiter in self.waiters:\n names.add(waiter.name)\n for paginator in self.paginators:\n names.add(paginator.name)\n\n result = list(names)\n result.sort()\n return result", "def urls(self) -> str:\n return self._data['urls']", "def __init__(self, issuer):\n self.reference = {\n REF_COUNTRY: {\n REF_NAME: COUNTRY,\n REF_VALUE: '',\n },\n REF_COMMON_NAME: {\n REF_NAME: COMMON_NAME,\n REF_VALUE: ''\n },\n REF_LOCALITY: {\n REF_NAME: LOCALITY,\n REF_VALUE: ''\n },\n REF_STATE_LOCALITY: {\n REF_NAME: STATE_LOCALITY,\n REF_VALUE: ''\n },\n REF_ORGANIZATION: {\n REF_NAME: ORGANIZATION,\n REF_VALUE: ''\n },\n REF_ORGANIZATION_UNIT: {\n REF_NAME: ORGANIZATION_UNIT,\n REF_VALUE: ''\n },\n REF_EMAIL_ADDRESS: {\n REF_NAME: EMAIL_ADDRESS,\n REF_VALUE: ''\n }\n }\n\n issuer_as_text = issuer.__str__()\n issuer_as_text = issuer_as_text.replace('/', '*')\n issuer_as_text = issuer_as_text.split('*')\n issuer_as_text.pop(0)\n\n i = 0\n for data in issuer_as_text:\n master_key = data.split('=')[0]\n secondary_key = REF_NAME\n secondary_value = data.split('=')[1]\n self.reference[master_key][REF_VALUE] = secondary_value\n i += 1", "def get_cert_DNSNames(cert):\n try:\n ext = cert.extensions.get_extension_for_oid(x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME)\n dns_names = ext.value.get_values_for_type(x509.DNSName)\n except Exception:\n raise exception.SysinvException(_(\n \"Failed to get certificate SAN's DNSNames.\"))\n return dns_names", "def normalize_urls(zone_name, files):\n def normalize_url(url):\n \"\"\"Prepend the zone name if the url is not absolute.\"\"\"\n # print(url)\n if not url.startswith('http://') and not url.startswith('https://'):\n return 'https://{}/{}'.format(zone_name, url.replace('//', '/'))\n return url\n return list(map(normalize_url, files))", "def GetValidHostsForCert(cert):\r\n if 'subjectAltName' in cert:\r\n return [x[1] for x in cert['subjectAltName'] if x[0].lower() == 'dns']\r\n else:\r\n return [x[0][1] for x in cert['subject']\r\n if x[0][0].lower() == 'commonname']", "def google_card_certs(self) -> Sequence[str]:\n return pulumi.get(self, \"google_card_certs\")", "def test_lookupNameservers(self):\n servers = {\n ('1.1.2.3', 53): {\n (b'example.com', A): {\n 'rCode': ENAME,\n },\n (b'example.com', NS): {\n 'answers': [(b'example.com', Record_NS(b'ns1.example.com'))],\n },\n },\n }\n resolver = self._getResolver(servers)\n d = resolver.lookupNameservers(b'example.com')\n def getOneName(results):\n ans, auth, add = results\n return ans[0].payload.name\n d.addCallback(getOneName)\n d.addCallback(self.assertEqual, Name(b'ns1.example.com'))\n return d", "def domainlist_reversens(self, response):\n data = response.json()\n for domain in 
itertools.chain(data['response']['primary_domains'], data['response']['primary_domains']):\n yield(domain.lower())", "def __str__(self):\n return str(self.domains)", "def get_urls(root):\n urls = []\n classes = \"|\".join([\"msl_organisation_list\", \"view-uclu-societies-directory\",\n \"atoz-container\", \"listsocieties\", \"block-og-menu\"])\n\n req = requests.get(root, headers) # , cookies=cookies)\n soup = BeautifulSoup(req.content, 'html.parser')\n main = soup.find(['div', 'ul', 'section'], class_=re.compile(classes))\n\n for a in main.find_all('a', href=True):\n url = a['href']\n if url.startswith(\"/\"):\n urls.append(domain + url)\n\n if url.startswith(\"https://society.tedu.edu\"):\n urls.append(url)\n\n urls = list(dict.fromkeys(urls))\n return urls", "def add_issuer_arguments(parser):\n group = parser.add_argument_group(\"Issuer Information\")\n group.add_argument(\n \"-ik\", \"--issuer_key\",\n help='Key used to certificate the key',\n )\n group.add_argument(\n \"-ic\", \"--issuer_cert\",\n help=\"Certificate used to certificate the key\",\n )\n return group", "def domains(cls) -> Set[str]:\n return set(cls.langs.values())", "def getImageName(self):\n return [os.path.basename(name) for name in self.meta['sources']]", "def getIssuer(self):\n\n return X501DN.from_POW(self.get_POW().getIssuer())", "def getIssuer(self):\n\n return X501DN.from_POW(self.get_POW().getIssuer())", "def get_contribution_links(type, standardized_name, namespaces_and_ids, cycle):\n \n ids = dict([(item['namespace'], item['id']) for item in namespaces_and_ids])\n if cycle == '-1':\n cycle = None\n\n links = [\n dict(text='OpenSecrets.org', url=_get_crp_url(type, standardized_name, ids, cycle)),\n dict(text='FollowTheMoney.org', url=_get_nimsp_url(type, standardized_name, ids, cycle)),\n dict(text='TransparencyData.com', url=_get_td_url(type, standardized_name, ids, cycle)),\n ]\n \n links = filter(lambda link: link['url'] is not None, links)\n\n return links", "def xff_alternative_names(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"xff_alternative_names\")", "def recipients(self) -> ty.List[str]:", "def domains_v2():\n # Is this public?\n configs = get_configs()\n if configs['api_requests'] == 'auth':\n # Auth token in headers\n try:\n auth_token = Token.query.filter_by(auth_token=request.headers.get('Authorization')).first()\n except:\n return {\"alternatives\" : \"Database Error with token!\"}\n if not auth_token:\n return {\"alternatives\": \"Unauthorized!\"}\n\n req_data = request.get_json()\n url = req_data['url']\n if not url:\n return {\"alternatives\" : 'None'}\n \n domain_data = check(url)\n alternatives = {\"alternatives\": domain_data['available_alternatives']}\n return alternatives" ]
[ "0.61785847", "0.59527624", "0.5829397", "0.5827037", "0.5789031", "0.57227594", "0.5698465", "0.5667429", "0.56406856", "0.55386025", "0.5526594", "0.55237424", "0.54851675", "0.5471927", "0.54601336", "0.54285586", "0.5422123", "0.5397905", "0.53853244", "0.53811383", "0.53167045", "0.5294951", "0.5290526", "0.52755785", "0.52402085", "0.5222762", "0.52103925", "0.52001417", "0.5188637", "0.5184422", "0.5172284", "0.5143862", "0.5097312", "0.50960416", "0.50919956", "0.5084084", "0.50829464", "0.508145", "0.50797033", "0.50681627", "0.5049841", "0.5046641", "0.503857", "0.50384337", "0.5028428", "0.50240916", "0.502008", "0.501333", "0.49998426", "0.49946663", "0.49942863", "0.49918473", "0.49633825", "0.49511087", "0.4948359", "0.49477082", "0.49288338", "0.49142393", "0.4908477", "0.4887546", "0.48872265", "0.48862734", "0.48774436", "0.4875086", "0.48691702", "0.4864925", "0.48647797", "0.4858525", "0.4852429", "0.4846417", "0.4840888", "0.48385593", "0.4834561", "0.48302147", "0.48201638", "0.48189318", "0.4818273", "0.48150954", "0.48100328", "0.4802858", "0.47976986", "0.47975606", "0.4795471", "0.4785826", "0.47850883", "0.47837803", "0.47750157", "0.47734505", "0.47731885", "0.47726262", "0.47640496", "0.4762731", "0.47617662", "0.47600004", "0.47448823", "0.47448823", "0.47436658", "0.47427443", "0.4738413", "0.47374701" ]
0.83089095
0
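The OCSP and issuer getters above both yield GeneralNames of the kind used in the Authority Information Access extension. The sketch below assumes the pyca/cryptography x509 API and hypothetical example URLs; it shows one plausible way such values could be assembled into that extension, which the source rows themselves do not confirm.

# Sketch (assumption): one plausible consumer of the OCSP/issuer GeneralNames,
# building an AuthorityInformationAccess extension with pyca/cryptography.
from cryptography import x509
from cryptography.x509.oid import AuthorityInformationAccessOID

def build_aia(ocsp_gnames, issuer_gnames):
    descs = [x509.AccessDescription(AuthorityInformationAccessOID.OCSP, gn)
             for gn in ocsp_gnames]
    descs += [x509.AccessDescription(AuthorityInformationAccessOID.CA_ISSUERS, gn)
              for gn in issuer_gnames]
    return x509.AuthorityInformationAccess(descs)

aia = build_aia([x509.UniformResourceIdentifier('http://ocsp.example.com')],
                [x509.UniformResourceIdentifier('http://ca.example.com/ca.crt')])
for desc in aia:
    print(desc.access_method, desc.access_location)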
Return crl_urls as GeneralNames
def get_crl_gnames(self):
    urls = ['uri:' + u for u in self.crl_urls]
    return self.load_gnames(urls)
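By analogy with the previous row, the CRL GeneralNames map naturally onto a CRLDistributionPoints extension. The following sketch assumes the pyca/cryptography API and a hypothetical example URL; it illustrates one possible consumer and is not part of the original dataset row.

# Sketch (assumption): feeding the CRL GeneralNames into a
# CRLDistributionPoints extension via pyca/cryptography.
from cryptography import x509

def build_cdp(crl_gnames):
    dp = x509.DistributionPoint(full_name=list(crl_gnames),
                                relative_name=None,
                                reasons=None,
                                crl_issuer=None)
    return x509.CRLDistributionPoints([dp])

cdp = build_cdp([x509.UniformResourceIdentifier('http://crl.example.com/ca.crl')])
print(list(cdp))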
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_issuer_urls_gnames(self):\n urls = ['uri:' + u for u in self.issuer_urls]\n return self.load_gnames(urls)", "def urls(self) -> list[str]:\r\n ...", "def list_urls(self, prefix: str = \"\", etl_name: str = None) -> Iterable[str]:", "def getURLs():", "def AuthorURLs(entry):\n a_URLs = ''\n for a in entry.getAuthors():\n url = a.get('homepage', ' ')\n a_URLs += \"%s and \" % url\n return a_URLs[:-5]", "def user_name_urls(self):\n raise NotImplementedError", "def reponames(gh, user):\n return [u.split('/')[-1] for u in urls(gh, user)]", "def get_urls(self):\r\n if self.mod.filename:\r\n return [x + self.mod.filename for x in self.mod.service.get_mirrors()]", "def _get_kdl_link_names(self):\n num_links = self._urdf_chain.getNrOfSegments()\n link_names = []\n for i in range(num_links):\n link_names.append(self._urdf_chain.getSegment(i).getName())\n return copy.deepcopy(link_names)", "def get_names_url(i):\n urls = list()\n with open('./urls/fall11_urls_train_'+str(i)+'.txt','r',encoding=\"Latin-1\") as f:\n for line in f:\n urls.append(line)\n urls = [url.strip('\\n') for url in urls]\n urls1 = [url.split('\\t')[1] for url in urls]\n names = [url.split('\\t')[0] for url in urls]\n return urls1,names", "def get_urls():\r\n return []", "def get_urls():\n return (constants.UNREVIEWED.col_values(3) +\n constants.REVIEWED.col_values(3) +\n constants.LAST.col_values(3))", "def get_ocsp_gnames(self):\n urls = ['uri:' + u for u in self.ocsp_urls]\n return self.load_gnames(urls)", "def list_domain_names():\n pass", "def names(self) -> list[str]:", "def link_name_list(self):\n return list(self._link_reg.keys())", "def get_all_category_urls():\n\treturn execute_sql(\"SELECT category_url FROM categories\").fetchall()", "def _reverse_urls(names, course):\r\n return [reverse(name, kwargs={'course_id': course.id.to_deprecated_string()})\r\n for name in names]", "def get_radiobrowser_base_urls():\n hosts = []\n # get all hosts from DNS\n ips = socket.getaddrinfo('all.api.radio-browser.info',\n 80, 0, 0, socket.IPPROTO_TCP)\n for ip_tupple in ips:\n ip = ip_tupple[4][0]\n\n # do a reverse lookup on every one of the ips to have a nice name for it\n host_addr = socket.gethostbyaddr(ip)\n # add the name to a list if not already in there\n if host_addr[0] not in hosts:\n hosts.append(host_addr[0])\n\n # sort list of names\n hosts.sort()\n # add \"https://\" in front to make it an url\n return list(map(lambda x: \"https://\" + x, hosts))", "def getNames(self) -> List[unicode]:\n ...", "def get_school_name_urls():\n\tschools_tree = get_tree(\"http://www.gla.ac.uk/schools/\")\n\tns = 'http://exslt.org/regular-expressions'\n\tpath = '//div[@class=\"row standardContent\"]//a[re:match(@href, \"schools/[A-Za-z]+/\")]'\n\t# Get all the <a> elements on the page which link to a school page\n\ta_elems = schools_tree.xpath(path, namespaces={'re':ns})\n\tbase_url = \"http://www.gla.ac.uk\"\n\turls = []\n\tnames = []\n\n\tfor a in a_elems:\n\t\t# make school staff page url\n\t\tstaff_page_url = base_url + a.get(\"href\") + \"staff/\"\n\t\turls.append(staff_page_url)\n\t\t# get name of school\n\t\tschool_name = a.text\n\t\tnames.append(school_name)\n\n\t# create list of tuples\n\tschool_names_urls = zip(names, urls)\n\treturn school_names_urls", "def getExpandedLinks():", "def get_urls(apk):\n res = []\n for dex in apk.get_all_dex():\n res += re.findall(b'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', dex)\n return [s.decode('utf-8') for s in res]", "def registered_urls(self):\n from 
pkg_resources import iter_entry_points\n\n entries = ['Priority', 'EP Name', 'Module', 'Class']\n for ep in iter_entry_points('appurl.urls'):\n c = ep.load()\n entries.append([c.match_priority, ep.name, ep.module_name, c.__name__, ])\n\n return entries", "def getOrtURLs(self, results):\n pcat = self.portal_catalog\n newresults = []\n for i in results:\n raw_webcode = i.get('webcode')\n if isinstance(raw_webcode, float):\n webcode = str(int(raw_webcode))\n elif isinstance(raw_webcode, int):\n webcode = str(raw_webcode)\n else:\n webcode = raw_webcode\n brains = pcat(Webcode = webcode)\n if len(brains) == 1:\n i['orturl'] = brains[0].getURL()\n else:\n i['orturl'] = ''\n newresults.append(i)\n return newresults", "def get_urls(inputfiles):\n urls = []\n scheme_rgx = re.compile(r'^https?://')\n for ifile in inputfiles:\n urls.append(ifile.read().splitlines())\n urls = set([n for l in urls for n in l])\n urls = list(filter(None, urls))\n for i in range(len(urls)):\n if not scheme_rgx.match(urls[i]):\n urls[i] = 'http://' + urls[i]\n return urls", "def get_url(url_index: str) -> list:\n url = get_url_category(url_index)\n return url", "def lookups(self, request, model_admin):\n return (\n ('ASSETS', _('ASSETS')),\n ('CHI', _('CHI')),\n ('IMWUT', _('IMWUT')),\n ('TACCESS', _('TACCESS')),\n ('TEI', _('TEI')),\n ('UIST', _('UIST')),\n ('UbiComp', _('UbiComp'))\n )", "def urls(self):\n header = \"URL,Linked From,Discovery Date\"\n gcsv = self.read()\n if gcsv[0] != header:\n raise Exception(\"Unexpected CSV format\")\n urls = set()\n for line in gcsv[1:]:\n # Get everything before the first commar (just the URL)\n line = line[:line.find(\",\")]\n urls.add(line)\n return urls", "def inshorts_urls():\n\n url1 = 'https://inshorts.com/en/read/business'\n url2 = 'https://inshorts.com/en/read/sports'\n url3 = 'https://inshorts.com/en/read/technology'\n url4 = 'https://inshorts.com/en/read/entertainment'\n return [url1, url2, url3, url4]", "def get_cora_label_names():\n # type: () -> List[str]\n return _label_names", "def urls(self):\n days = ['sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat', ]\n url = 'http://www2.nngov.com/newport-news/offenses/%stxt.htm'\n return [url % day for day in days]", "async def _transform_hares(self, urls):\n transformed_links = []\n result_list = await self._connect(urls, raw=True)\n for result in result_list:\n url, source_code = result[:2]\n link = re.findall(r'(http://hares.tw/archives/.*?)\\\">繼續閱讀全文', source_code)\n if link:\n transformed_links.append(link[0])\n else: # list is empty\n transformed_links.append(url)\n return transformed_links", "def get_resource_titles(self, html_doc):\n soup = BeautifulSoup(html_doc, 'html.parser')\n links = soup.find_all('a')\n resources = []\n for link in links:\n href = link.get('href') #get id a dict method returns a value for the given key\n if href and '/title' in href and not href in resources:\n resources.append(href)\n return resources", "def URLs(self, default=[{}]):\n tmp = self.data.get('urls', default)\n return [HEP.URLObject(i) for i in tmp]", "def get_urls(self):\n return patterns('')", "def URLs(self, default=[{}]):\n tmp = self.data.get('metadata', {}).get('urls', default)\n return [HEP.URLObject(i) for i in tmp]", "def result2name(self, result: _Result) -> str:\n parsed_uri = urllib.parse.urlparse(result.uri)\n\n # define (categories of) aggregated sources and split them accordingly:\n if parsed_uri.netloc in [\"bartoc-skosmos.unibas.ch\", \"data.ub.uio.no\", \"vocab.getty.edu\"]:\n return self.uri2name(parsed_uri, 
n=1)\n elif parsed_uri.netloc in [\"isl.ics.forth.gr\", \"linkeddata.ge.imati.cnr.it\", \"www.yso.fi\"]:\n return self.uri2name(parsed_uri, n=2)\n elif parsed_uri.netloc in [\"vocabs.ands.org.au\"]:\n return self.uri2name(parsed_uri, n=5)\n else:\n return parsed_uri.netloc", "def load_links(self) -> Tuple[List[str], List[str]]:\n\n with open(URL_FILE, 'r') as txt_file:\n lines = txt_file.read().split()\n\n urls = []\n for line in lines:\n urls.append(line.split(',')[0])\n \n return lines, urls", "def urls(self) -> str:\n return self._data['urls']", "def list(self):\n\t\treturn self.link_words", "def names(cls) -> List[str]:", "def get_author_name_urls(dept_name, dept_url):\n\t# Change to \"School of Humanities\" to match the name used in Enlighten\n\t# Done because the string obtained from http://www.gla.ac.uk/schools/ contains the Gaelic name as well\n\tif \"Humanities\" in dept_name:\n\t\tdept_name = \"School of Humanities\"\n\n\t# get list of names of researchers in department\n\tnames = get_names(dept_url)\n\n\twinning_name_urls = set()\n\n\t# loop through each name\n\tfor name in names:\n\t\tname = initialise_first_name(name)\n\t\t# Get Enlighten page on which author name will be found (page for the letter of author's last name)\n\t\tfull_url = author_list_base + \"index.\"+ name.split(\" \")[0][0] + \".html\"\n\t\ttree = get_tree(full_url)\n\t\t# Get all candidate authors which match the name\n\t\tname_urls = get_name_url_matches(name, tree)\n\t\t# If candidates were found\n\t\tif name_urls:\n\t\t\t# Filter out authors that have already been scraped\n\t\t\tname_urls = [name_url for name_url in name_urls if name_url not in winning_name_urls]\n\t\t\t# Get the first ranked (name, url) tuple for the target name from the remaining candidates\n\t\t\twinning_name_url = get_winning_url(name_urls, dept_name)\n\t\t\tif winning_name_url:\n\t\t\t\twinning_name_urls.add(winning_name_url)\n\n\treturn winning_name_urls", "def getcongressURLs():\n\n\tdates = getdates()\n\n\tf = open('congressional_records_URLs2.csv', 'w')\n\ttry:\n\t\tfor dt in range(0, len(dates)):\n\t\t\tdate = str(dates[dt]).replace(\"'\", \"\").replace(\",\", \"-\").replace(\" \", \"\").replace(\"(\", \"\").replace(\")\", \"\")\n\t\t\tfull_url = getfullURL(date)\n\t\t\tf.write(u'%s\\n' % (full_url))\n\tfinally:\n\t\tf.close()", "def list_domain_names(self) -> Dict:\n pass", "def GetResourceNames(self):\r\n return [x.name for x in self.resources]", "def _get_name_relurl_and_desc(snippet_html):\n name_and_url_part, desc_part = snippet_html.find_all('p', 'snippet')\n name = name_and_url_part.get_text()\n relative_url = name_and_url_part.find('a').get('href')\n desc = desc_part.get_text()\n return name, relative_url, desc", "def getUrl(self): #$NON-NLS-1$\r", "def getUrl(self): #$NON-NLS-1$\r", "def get_data_urls_and_terms(self, subject):\n print(\"Finding terms with course information available\")\n html = self.call_url_and_get_html_object(\"https://my.gwu.edu/mod/pws/\") # Call home page url\n term_elements = html.findall(\".//div[@class='tableHeaderFont']\") # Obtain list of available terms\n terms = [term.text.lower().strip() for term in term_elements]\n term_urls = []\n for term in terms:\n num_term = translate_term_to_numerical(term)\n self.terms.append(int(num_term))\n term_urls.append((self.get_url_from_term_string(num_term, subject), term))\n return term_urls", "def getUrls(self):\n # in case you need to move from a read only Url to a writeable one, here it gets replaced\n repopath = self.repositoryUrl().replace(\"[git]\", 
\"\")\n repoString = utils.replaceVCSUrl(repopath)\n [repoUrl, repoBranch, repoTag] = utils.splitVCSUrl(repoString)\n if not repoBranch and not repoTag:\n repoBranch = \"master\"\n print(\"|\".join([repoUrl, repoBranch, repoTag]))\n return True", "def get_course_unit_urls(self):\n\n for url in self.course_page.find_all(attrs=COURSE_UNIT_TITLE):\n self.course_unit_urls.append(url[\"href\"])\n logging.debug(\"course_unit_urls:{}\".format(self.course_unit_urls))\n logging.info(\"Course unit urls retrieved\")", "def test_uri(self):\n rv = extract_ids(X509_OTHER_NAME)\n assert [\n URIPattern(b\"http://example.com/\")\n ] == [id for id in rv if isinstance(id, URIPattern)]", "def urls(self):\r\n urls = []\r\n\r\n for url_name in sorted(self.resources.keys()):\r\n\r\n resource = self.resources[url_name]\r\n urls.append(resource.as_url(\r\n api=self,\r\n name_prefix='-'.join(\r\n (self.prefix, self.str_version)).strip('-'),\r\n url_prefix=self.str_version\r\n ))\r\n\r\n return patterns(self.prefix, *urls)", "def names(self) -> List:\n ...", "def fetch_citylink_refs(self):\n tree = html.fromstring(self.fetch_manifest())\n self_refs = tree.xpath('//table/tr/td/table/tr[position()>4]/td[1]/text()')\n return [x.strip() for x in self_refs[:-1]]", "def gen_links(text):\n return []", "def url_bases(self) -> List[str]:\n return self._url_module.url_bases", "def get_ref_names(self) -> dict :\n return self._dc_names(self._ref_dc())", "def extract_gnames(self, ext):\n res = []\n for gn in ext:\n if isinstance(gn, x509.RFC822Name):\n res.append('email:' + as_unicode(gn.value))\n elif isinstance(gn, x509.DNSName):\n res.append('dns:' + as_unicode(gn.value))\n elif isinstance(gn, x509.UniformResourceIdentifier):\n res.append('uri:' + as_unicode(gn.value))\n elif isinstance(gn, x509.IPAddress):\n res.append('ip:' + str(gn.value))\n elif isinstance(gn, x509.DirectoryName):\n val = self.extract_name(gn.value)\n res.append('dn:' + render_name(val))\n else:\n raise InvalidCertificate(\"Unsupported subjectAltName type: %s\" % (gn,))\n return res", "def getAliases(self):", "def _get_changelist_named_url(self):\n raise NotImplementedError(\n \"You should implement `_get_changelist_named_url`\"\n )", "def tcv_name_list(self):\n return list(self._link_reg.tcv_names)", "def get_docs_urls(self):\n docs_urls = []\n link_labels = []\n for tag in self.post_div.find_all(\"a\"):\n url = tag[\"href\"]\n if url.startswith(\"https://docs.google.com\") or \\\n url.startswith(\"https://drive.google.com\"):\n docs_urls += [url]\n link_labels += [tag.text]\n return docs_urls, link_labels", "def links(self) -> str:\n return pulumi.get(self, \"links\")", "def get_input_domains():\n df = pandas.read_excel(\"AutoScrapy/files/EBE21 - Top 100 Onlineshops to scrapp.ods\", engine=\"odf\")\n list_of_addresses = df['Domain'].to_list()\n list_of_addresses = [(\"http://\" + address) for address in list_of_addresses]\n print(list_of_addresses)\n return list_of_addresses", "def get_custom_short_paths(content):", "def read_urls(filename, server_name='http://code.google.com/'):\n # Construct unique URLs from file as - http://code.google.com/<url from file>\n animal_list = []\n ordered_list = []\n src_file = open(filename, 'rU')\n for line in src_file :\n animal_path = re.search( 'GET\\s+/(.+jpg)', line )\n if animal_path is not None :\n if animal_path.group(1) not in animal_list :\n animal_list.append( animal_path.group(1) )\n ordered_list = sorted(animal_list,key=sort_img_name)\n # Used in in range loop to operate on ordered_list rather than shallow 
copy, e.g. for path in ordered_list\n for i in range(0, len(ordered_list), 1) :\n ordered_list[i] = server_name + ordered_list[i]\n return ordered_list", "def names(self):\r\n return resource.Name(self)", "def get_layer_urls(self):\n urls = []\n\n if getattr(self, 'additional_domains'):\n map(urls.append, (domain for domain in self.additional_domains.split(\";\") if domain))\n\n return urls", "def get_resource_urls():\n base_url = 'http://developer.pardot.com/'\n pattern = re.compile(\n r'(?ims)\\<a [^>]*?href=\"(kb/api-version-3/[^>]*?/)\"[^>]*?\\>'\n r'[^<]*?\\</a\\>')\n response = requests.get(base_url)\n return [\n '%s/%s' % (base_url, url) for url in pattern.findall(response.text)]", "def names(self):\n\t\treturn", "def get_init_all_names(self) -> list[str]:\n names = {self.client.name, self.client.alias_name}\n if self.service_resource:\n names.add(self.service_resource.name)\n names.add(self.service_resource.alias_name)\n for waiter in self.waiters:\n names.add(waiter.name)\n for paginator in self.paginators:\n names.add(paginator.name)\n\n result = list(names)\n result.sort()\n return result", "def get_urls(root):\n urls = []\n classes = \"|\".join([\"msl_organisation_list\", \"view-uclu-societies-directory\",\n \"atoz-container\", \"listsocieties\", \"block-og-menu\"])\n\n req = requests.get(root, headers) # , cookies=cookies)\n soup = BeautifulSoup(req.content, 'html.parser')\n main = soup.find(['div', 'ul', 'section'], class_=re.compile(classes))\n\n for a in main.find_all('a', href=True):\n url = a['href']\n if url.startswith(\"/\"):\n urls.append(domain + url)\n\n if url.startswith(\"https://society.tedu.edu\"):\n urls.append(url)\n\n urls = list(dict.fromkeys(urls))\n return urls", "def get_links_from_url(url):\n return [get_base(url)]", "def gather_headlines(urls):\n pass", "def councils(self):\n catalog = getToolByName(self.context, 'portal_catalog')\n\n return [dict(url=council.getURL(), title=council.Title,\n address=council.Description) for council in\n catalog({'object_provides': ICouncil.__identifier__,\n 'path': dict(query='/'.join(self.context.getPhysicalPath()),\n depth=1), 'sort_on': 'sortable_title'})]", "def get_all_clubs():\n\turl = \"http://fas-mini-sites.fas.harvard.edu/osl/grouplist\"\n\n\tr = rq.get(url)\n\tsoup = BeautifulSoup(r.text)\n\tlinks = soup.find_all('a')\n\n\tlinkArray = []\n\tnameArray = []\n\n\tfor link in links:\n\t\tl = link.get('href')\n\t\tlinkArray.append(l)\n\t\tname = link.get_text()\n\t\tname = name.encode('ascii','ignore')\n\t\tnameArray.append(name)\n\n\treturn nameArray, linkArray", "def get_urls(self) -> Dict[str, str]:\n return {}", "def url_name(request):\n url_name = False\n if request.resolver_match:\n url_name = request.resolver_match.url_name\n return {\"url_name\": url_name}", "def test_colon_in_url_name(self):\r\n\r\n print(\"Starting import\")\r\n # Not using get_courses because we need the modulestore object too afterward\r\n modulestore = XMLModuleStore(DATA_DIR, course_dirs=['toy'])\r\n courses = modulestore.get_courses()\r\n self.assertEquals(len(courses), 1)\r\n course = courses[0]\r\n\r\n print(\"course errors:\")\r\n for (msg, err) in modulestore.get_course_errors(course.id):\r\n print(msg)\r\n print(err)\r\n\r\n chapters = course.get_children()\r\n self.assertEquals(len(chapters), 5)\r\n\r\n ch2 = chapters[1]\r\n self.assertEquals(ch2.url_name, \"secret:magic\")\r\n\r\n print(\"Ch2 location: \", ch2.location)\r\n\r\n also_ch2 = modulestore.get_item(ch2.location)\r\n self.assertEquals(ch2, also_ch2)\r\n\r\n 
print(\"making sure html loaded\")\r\n loc = course.id.make_usage_key('html', 'secret:toylab')\r\n html = modulestore.get_item(loc)\r\n self.assertEquals(html.display_name, \"Toy lab\")", "def urls(gh, user):\n return [repo.url for repo in getuserrepos(gh, user)]", "def load_gnames(self, gname_list):\n gnames = []\n for alt in gname_list:\n if ':' not in alt:\n raise InvalidCertificate(\"Invalid gname: %s\" % (alt,))\n t, val = alt.split(':', 1)\n t = t.lower().strip()\n val = val.strip()\n if t == 'dn':\n gn = x509.DirectoryName(self.load_name(parse_dn(val)))\n elif t == 'dns':\n gn = x509.DNSName(val)\n elif t == 'email':\n gn = x509.RFC822Name(val)\n elif t == 'uri':\n gn = x509.UniformResourceIdentifier(val)\n elif t == 'ip':\n if val.find(':') >= 0:\n gn = x509.IPAddress(ipaddress.IPv6Address(val))\n else:\n gn = x509.IPAddress(ipaddress.IPv4Address(val))\n elif t == 'dn':\n gn = x509.DirectoryName(self.load_name(parse_dn(val)))\n elif t == 'net':\n if val.find(':') >= 0:\n gn = x509.IPAddress(ipaddress.IPv6Network(val))\n else:\n gn = x509.IPAddress(ipaddress.IPv4Network(val))\n else:\n raise Exception('Invalid GeneralName: ' + alt)\n gnames.append(gn)\n return gnames", "def orca_list():\n val = []\n val.append('orca')\n val.append('orca-b3lyp')\n return val", "def handle_url(url, session, res):\n print(\"Parsing\", url, file=sys.stderr)\n try:\n data, baseUrl = getPageContent(url, session)\n except IOError as msg:\n print(\"ERROR:\", msg, file=sys.stderr)\n return\n for match in url_matcher.finditer(data):\n shortname = match.group(1)\n name = unescape(match.group(2))\n name = asciify(name.replace('&', 'And').replace('@', 'At'))\n name = capfirst(name)\n if name in exclude_comics:\n continue\n if contains_case_insensitive(res, name):\n # we cannot handle two comics that only differ in case\n print(\"INFO: skipping possible duplicate\", repr(name), file=sys.stderr)\n continue\n res[name] = shortname", "def get_names(url):\n\t# get html element tree\n\ttree = get_tree(url)\n\t# Names are text within <a> elements in this list\n\t# xpath returns a list with alternating last and first names as elements\n\t# Concatenate each last name and first name pair and put in new list as full name\n\tnames = tree.xpath('//*[@id=\"research-teachinglist\"]/li//a//text()')\n\tfull_names = []\n\tfor i in range(0, len(names)-1, 2):\n\t\tfull_names.append(names[i] + names[i+1])\n\n\treturn full_names", "def aliases(self):\n a = BlogAlias.objects.filter(blog=self)\n a = \" | \".join([i.domain for i in a])\n return a", "def init_urls(self):\n url = 'http://www.lagou.com/'\n for ip_info in self.col.find(no_cursor_timeout=True):\n ip, port = ip_info['ip'], ip_info['port']\n if ip and port:\n self.urls.append((url, ip, port)) # tuple", "def reformat(array):\n global searched_domain\n response = []\n for tag in array:\n link = tag.get(\"href\", None)\n if link is not None:\n p = parse.urlparse(link)\n if re.match(searched_netloc, p.netloc):\n if p.scheme == \"\":\n link = parse.ParseResult(\"http\", *p[1:]).geturl()\n response.append(link)\n return response", "def lookups(self, request, model_admin):\n return (\n ('rien', 'ni rapproché ni pointé'),\n ('p', 'pointé uniquement'),\n ('nrapp', 'non-rapproché'),\n ('rapp', 'rapproché uniquement'),\n ('pr', 'pointé ou rapproché')\n )", "def url(self):\n if self.term_type != 'C':\n url_fmt = self.path_level_url_fmt\n url_info = {'id': self.term_type}\n else:\n url_fmt = self.obj_level_url_fmt\n url_info = {'org_prefix': self.org_prefix, 'id': self.term_id}\n\n return 
url_fmt % url_info", "def get_text_data_child_list(self):\n return self.address_list + self.urls", "def urls(self):\n return self._list_urls()", "def items(self):\n return self.namespace_to_alias.items()", "def url(result):\n return result.entities.get(u'urls')", "def build_end_url_list(url):\n http_types = [\"http://\", \"https://\"]\n dub_types = [\"www.\", \"\"] # this order needs to preserved for testing at www.hgdatascience.com\n http_dub_urls = [\"{}{}{}\".format(h_type, dub_type, url) for dub_type in dub_types for h_type in http_types]\n return http_dub_urls", "def test_splits_urls_for_nouns(self):\r\n test_value = \"http://google.com/drives/autonomous/cars\"\r\n self.assertEqual(\r\n set([u'cars', u'autonomous']),\r\n suggest_tags(test_value))", "def urls(self):\n patterns = []\n for sitecomp in self.modules():\n patterns.append(sitecomp.urls)\n pass\n return patterns", "def protocol_names(self):\n l = self.protocols()\n retval = [str(k.name) for k in l]\n return retval", "def get_links(self) -> List[str]:\n return self.__links" ]
[ "0.6717781", "0.6483335", "0.6474641", "0.637163", "0.6109357", "0.59807044", "0.5915114", "0.5866936", "0.57854867", "0.575787", "0.57498235", "0.57257307", "0.56991327", "0.5698962", "0.5662451", "0.5650308", "0.55682266", "0.5550901", "0.55472463", "0.5547159", "0.55458784", "0.55447024", "0.55304193", "0.55018187", "0.54978037", "0.5464538", "0.5461574", "0.54599947", "0.5459908", "0.54295915", "0.5410805", "0.5409341", "0.5387773", "0.538777", "0.5384889", "0.5383985", "0.53748333", "0.5366707", "0.53521365", "0.5348593", "0.534354", "0.53393674", "0.53156674", "0.53122246", "0.53115517", "0.5302395", "0.5293299", "0.5286654", "0.5286654", "0.528157", "0.5279437", "0.5278996", "0.5276029", "0.52755314", "0.5257678", "0.52553684", "0.5244604", "0.5244073", "0.52412474", "0.524077", "0.52399087", "0.5235011", "0.5234583", "0.5221739", "0.5215644", "0.52035856", "0.51982", "0.5189835", "0.5179851", "0.51786613", "0.5168309", "0.5164529", "0.5160801", "0.5157972", "0.5157333", "0.51535773", "0.5146839", "0.5145321", "0.5144308", "0.5142356", "0.5138261", "0.5135787", "0.51314414", "0.5126466", "0.5124157", "0.5119521", "0.5118743", "0.51088464", "0.5106734", "0.51038307", "0.5100621", "0.5099308", "0.508776", "0.5085975", "0.5082806", "0.5082635", "0.5077161", "0.5074388", "0.5065202", "0.50639826" ]
0.69459665
0
Return TLS Feature list
def get_tls_features(self):
    tls_features = []
    if self.ocsp_must_staple:
        tls_features.append(x509.TLSFeatureType.status_request)
    if self.ocsp_must_staple_v2:
        tls_features.append(x509.TLSFeatureType.status_request_v2)
    return tls_features
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _runtime_feature_list(self):\n supported_features_command = [self._path_to_driver(), '--print-supported-features']\n try:\n output = self._executive.run_command(supported_features_command, error_handler=Executive.ignore_error)\n except OSError, e:\n _log.warn(\"Exception running driver: %s, %s. Driver must be built before calling WebKitPort.test_expectations().\" % (supported_features_command, e))\n return None\n\n # Note: win/DumpRenderTree.cpp does not print a leading space before the features_string.\n match_object = re.match(\"SupportedFeatures:\\s*(?P<features_string>.*)\\s*\", output)\n if not match_object:\n return None\n return match_object.group('features_string').split(' ')", "def libinfo_features():\n lib_features = ctypes.POINTER(LibFeature)()\n lib_features_size = ctypes.c_size_t()\n check_call(_LIB.MXLibInfoFeatures(ctypes.byref(lib_features), ctypes.byref(lib_features_size)))\n feature_list = [lib_features[i] for i in range(lib_features_size.value)]\n return feature_list", "def features(self):\n buf = (ctypes.c_char * self.MAX_BUF_SIZE)()\n self._dll.JLINKARM_GetFeatureString(buf)\n\n result = ctypes.string_at(buf).decode().strip()\n if len(result) == 0:\n return list()\n\n return result.split(', ')", "def GetFeatures(self):\n try:\n return self._SendRequest(HTTP_GET, \"/%s/features\" % GANETI_RAPI_VERSION,\n None, None)\n except GanetiApiError as err:\n # Older RAPI servers don't support this resource\n if err.code == HTTP_NOT_FOUND:\n return []\n\n raise", "def get_feature_names():\n return ['UserID', 'SessionID', 'TaskName', 'Orientation', 'TapType'] + get_numerical_feature_names()", "def get_all_features(config: Config) -> typing.List[str]:\n return [feature.name for feature in config.features]", "def supported_features(self):\n return self._supported_features", "def feature_list(self):\n return self._feature_list.Decode(self._encoded_fields['feature_list'])", "def supported_features(self):\n return self._support_flags", "def supported_features(self):\n return self._support_flags", "def supported_features(self):\n return self._support_flags", "def supported_features(self):\n return self._support_flags", "def has_tls_support(self):\n return \"STARTTLS\" in self.__capabilities", "def getFeatures2(url, label):\r\n result = []\r\n url = str(url)\r\n \r\n #add the url to feature set\r\n result.append(url)\r\n \r\n #parse the URL and extract the domain information\r\n path = urlparse(url)\r\n ext = tldextract.extract(url)\r\n \r\n #counting number of dots in subdomain \r\n result.append(countdots(ext.subdomain))\r\n \r\n #checking hyphen in domain \r\n result.append(CountSoftHyphen(path.netloc))\r\n \r\n #length of URL \r\n result.append(length(url))\r\n \r\n #checking @ in the url \r\n result.append(CountAt(path.netloc))\r\n \r\n #checking presence of double slash \r\n result.append(CountDSlash(path.path))\r\n \r\n #Count number of subdir \r\n result.append(countSubDir(path.path))\r\n \r\n #number of sub domain \r\n result.append(countSubDomain(ext.subdomain))\r\n \r\n #length of domain name \r\n path2 = urlparse(url_format(url))\r\n result.append(len(path2.netloc)) \r\n \r\n #count number of queries \r\n result.append(len(path.query))\r\n \r\n #Adding domain information\r\n \r\n #if IP address is being used as a URL \r\n result.append(containsip(ext.domain))\r\n \r\n #presence of Suspicious_TLD\r\n result.append(1 if ext.suffix in Suspicious_TLD else 0)\r\n \r\n #append default for create_age(months)country\r\n result.append(-1)\r\n \r\n #append default for 
expiry_age(months)\r\n result.append(-1)\r\n \r\n #append default for update_age(days)\r\n result.append(-1)\r\n \r\n #append default for country\r\n result.append('None')\r\n \r\n #append extension\r\n path = urlparse(url)\r\n \r\n if get_ext(path.path) == '':\r\n result.append('None')\r\n else:\r\n result.append(get_ext(path.path))\r\n \r\n #append label\r\n result.append(str(label))\r\n \r\n return result", "def getSupportedFeaturesV2( self ):\n\n\t\ttry:\n\t\t\trgs = [\n\t\t\t\t'/usr/local/bin/make-key',\n\t\t\t\t'-J'\n\t\t\t]\n\n\t\t\toCMD = subprocess.Popen(\n\t\t\t\trgs,\n\t\t\t\tstdin=subprocess.PIPE,\n\t\t\t\tstdout=subprocess.PIPE,\n\t\t\t\tstderr=subprocess.STDOUT,\n\t\t\t\tshell=False,\n\t\t\t\tclose_fds=True\n\t\t\t)\n\t\t\tsOutput = oCMD.communicate()[ 0 ]\n\t\t\tbStatus = oCMD.returncode\n\n\t\t\tif bStatus != 1:\n\t\t\t\traise Exception( 'make-key returned bad exit status' )\n\n\t\t\treturn sOutput.strip()\n\n\t\texcept Exception, e:\n\t\t\terrMsg( 'error while getting supported features V2' )\n\t\t\terrMsg( e )\n\t\t\traise Exception, \"System error while querying for supported features.\"", "async def getFeatures(self, body=\"\"):\n payload = {}\n \n # Parameter validation\n schema = ConfigurationValidator.getFeatures()\n schema.dump(schema.load(payload))\n \n\n url_with_params = await create_url_with_params(api_url=self._urls[\"getFeatures\"], proccessed_params=\"\"\"{\"required\":[],\"optional\":[],\"query\":[],\"headers\":[],\"path\":[]}\"\"\", )\n query_string = await create_query_string()\n headers = {\n \"Authorization\": \"Bearer \" + base64.b64encode(\"{}:{}\".format(self._conf.applicationID, self._conf.applicationToken).encode()).decode()\n }\n if self._conf.locationDetails:\n headers[\"x-location-detail\"] = ujson.dumps(self._conf.locationDetails)\n for h in self._conf.extraHeaders:\n headers.update(h)\n exclude_headers = []\n for key, val in headers.items():\n if not key.startswith(\"x-fp-\"):\n exclude_headers.append(key)\n return await AiohttpHelper().aiohttp_request(\"GET\", url_with_params, headers=get_headers_with_signature(urlparse(self._urls[\"getFeatures\"]).netloc, \"get\", await create_url_without_domain(\"/service/application/configuration/v1.0/feature\", ), query_string, headers, body, exclude_headers=exclude_headers), data=body, cookies=self._conf.cookies)", "def listFeatures() :\n global features\n features = [feature.split(\".\")[0] for feature in os.listdir(os.path.abspath(__file__)[:-11])\n if feature.endswith(\".py\") and feature != \"__init__.py\"]", "def feature_list(self):\n components = self._device_info.get(device_data_constants.KEY_COMPONENT, {})\n # Set is_rma_device.\n components['is_rma_device'] = self._is_rma_device\n return self._feature_list.Encode(components)", "def list_ciphers():\n global AVAILABLE_CIPHERS\n print(\"[!] 
Available ciphers: \")\n for i in range(len(AVAILABLE_CIPHERS)):\n print(\" ----> %s.%s\"%(i+1, AVAILABLE_CIPHERS[i]))\n exit()", "def list_feature_tests(self):\n\t\treturn self.test_names", "def test_get_tls(matrix):\n matrix.charm_config[\"enable-tls\"] = True\n result = matrix.get_tls()\n assert result is True\n matrix.charm_config[\"enable-tls\"] = False\n result = matrix.get_tls()\n assert result is False", "def readFeatures(self):\n\t\treturn self._fileSystem.readFeatures()", "def ciphers(self) -> Sequence[str]:\n return pulumi.get(self, \"ciphers\")", "def ciphers(self) -> Sequence[str]:\n return pulumi.get(self, \"ciphers\")", "def get_features(self):\n return []", "def findFeatures(self):\n\t\tpass", "def feature_list(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___FeatureList]:", "def do_features_request_2(features=None):\n\n #  connect to database\n cur_db = connect_db(\"172.20.38.50\", \"mvelay\", \"user\", \"sandbox\")\n cursor = cur_db.cursor()\n\n # build whole query\n cur_query = \"\"\" SELECT module, sw, version FROM t_feature\n WHERE feature=\"%s\" AND supported=1;\"\"\" % (features[0])\n\n print cur_query\n cursor.execute(cur_query)\n results = cursor.fetchall()\n cursor.close()\n\n if results:\n results = results[:1000] # Limit to first 1000 results\n else:\n results = None\n\n return features[0], results", "def getFeatures(self, state, action):\n util.raiseNotDefined()", "def supported_features(self):\n return SUPPORT_FLAGS", "def supported_features(self):\n return SUPPORT_FLAGS", "def supported_features(self):\n return SUPPORT_FLAGS", "def supported_features(self):\n return SUPPORT_FLAGS", "def ciphers(self):\n return self._ciphers", "def test_get_features():\n features = (\n \"Feature Name : Capa1\\r\\n State : Enabled\\r\\n\"\n \"Feature Name : Capa2\\r\\n State : Disabled\\r\\n\"\n )\n\n mock = MagicMock(return_value=features)\n with patch.dict(dism.__salt__, {\"cmd.run\": mock}):\n out = dism.get_features()\n mock.assert_called_once_with(\n [dism.bin_dism, \"/English\", \"/Online\", \"/Get-Features\"]\n )\n assert out == [\"Capa1\", \"Capa2\"]", "def feature_flags(self):\r\n return self.env_tokens.get('FEATURES', dict())", "def test_available_features():\n features = (\n \"Feature Name : Capa1\\r\\n State : Enabled\\r\\n\"\n \"Feature Name : Capa2\\r\\n State : Disabled\\r\\n\"\n )\n\n mock = MagicMock(return_value=features)\n with patch.dict(dism.__salt__, {\"cmd.run\": mock}):\n out = dism.available_features()\n mock.assert_called_once_with(\n [dism.bin_dism, \"/English\", \"/Online\", \"/Get-Features\"]\n )\n assert out == [\"Capa2\"]", "def check_supported_features(self):", "def getFeatures(self, state, action):\n util.raiseNotDefined()", "def getFeatures(self, state, action):\n util.raiseNotDefined()", "def getFeatures(self, state, action):\n util.raiseNotDefined()", "def getFeatures(self, state, action, thisAgent):\n util.raiseNotDefined()", "def fusion_api_get_security_protocols(self, uri=None, api=None, headers=None, param='/protocols'):\n return self.security_standards.get(uri=uri, api=api, headers=headers, param=param)", "def getFeatures(self):\n return \"1:\" + str(self.getEnergy()) + \\\n \" 2:\" + str(self.getCentroid()) + \\\n \" 3:\" + str(self.getZCrossingRate()) + \\\n \" 4:\" + str(self.getBandwidth())", "def output_features(self) -> List[str]:\n return self._pipeline.features", "def _config_tls(self):\n pass", "def _parse(self, features):\n supported = irc.ServerSupportedFeatures()\n features = 
[\"{}={}\".format(name, value or \"\") for name, value in features]\n supported.parse(features)\n return supported", "def list_features(\n self,\n ) -> Callable[\n [featurestore_service.ListFeaturesRequest],\n Awaitable[featurestore_service.ListFeaturesResponse],\n ]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"list_features\" not in self._stubs:\n self._stubs[\"list_features\"] = self.grpc_channel.unary_unary(\n \"/google.cloud.aiplatform.v1beta1.FeaturestoreService/ListFeatures\",\n request_serializer=featurestore_service.ListFeaturesRequest.serialize,\n response_deserializer=featurestore_service.ListFeaturesResponse.deserialize,\n )\n return self._stubs[\"list_features\"]", "def get_features(self):\n return self._features", "def test_get_hyperflex_capability_info_list(self):\n pass", "def supported_features(self) -> int:\n return self._support_flags", "def features(self) -> List[Feature]:\n return self._features", "def ftp_FEAT(self, line):\n features = ['MDTM','REST STREAM','SIZE','TVFS']\n features.sort()\n self.push(\"211-Features supported:\\r\\n\")\n self.push(\"\".join([\" %s\\r\\n\" %x for x in features]))\n self.respond('211 End FEAT.')", "def get_supported_feature_sets(flags) -> List[str]:\n\n # find all supported feature sets\n supported = []\n for one_feature_set in sorted(REQUIRED_FEATURES.keys()):\n if supports_feature_set(flags, one_feature_set):\n supported.append(one_feature_set)\n return supported", "def supported_features(self) -> int:\n return self._supported_features", "def get_loaded_features(self) -> list[FeatureModule]:\n return list(self._features.values())", "def features(self):\n return self._features", "def _get_features(self, session):\n feature_utils.qsr_feature_extractor( session, get_location_objects = feature_utils.get_location_objects_most_active )\n feature_utils.standardize_simple(session, self.config)\n\n # feature_utils.marker_feature_extractor( session, get_location_objects = feature_utils.get_location_objects_most_active )\n\n return session[SESSION_FEAT]", "def get_features(self):\n features = {}\n for i in self.binaries:\n features[i] = self.binaries[i].features\n return features", "def get_feature_names(self):\n ...", "def getFeatures(url, label, w):\r\n result = []\r\n url = str(url)\r\n \r\n #add the url to feature set\r\n result.append(url)\r\n \r\n #parse the URL and extract the domain information\r\n path = urlparse(url)\r\n ext = tldextract.extract(url)\r\n \r\n #counting number of dots in subdomain \r\n result.append(countdots(ext.subdomain))\r\n \r\n #checking hyphen in domain \r\n result.append(CountSoftHyphen(path.netloc))\r\n \r\n #length of URL \r\n result.append(length(url))\r\n \r\n #checking @ in the url \r\n result.append(CountAt(path.netloc))\r\n \r\n #checking presence of double slash \r\n result.append(CountDSlash(path.path))\r\n \r\n #Count number of subdir \r\n result.append(countSubDir(path.path))\r\n \r\n #number of sub domain \r\n result.append(countSubDomain(ext.subdomain))\r\n \r\n #length of domain name \r\n path2 = urlparse(url_format(url))\r\n result.append(len(path2.netloc))\r\n \r\n #count number of queries \r\n result.append(len(path.query))\r\n \r\n #Adding domain information\r\n \r\n #if IP address is being used as a URL \r\n result.append(containsip(ext.domain))\r\n \r\n #presence of Suspicious_TLD\r\n result.append(1 if ext.suffix in Suspicious_TLD else 
0)\r\n \r\n #Get domain information by asking whois\r\n avg_month_time=365.2425/12.0\r\n \r\n #calculate creation age in months\r\n \r\n if w.creation_date == None or type(w.creation_date) is str :\r\n result.append(-1)\r\n \r\n else:\r\n if(type(w.creation_date) is list): \r\n create_date=w.creation_date[-1]\r\n else:\r\n create_date=w.creation_date\r\n\r\n if(type(create_date) is datetime.datetime):\r\n today_date=datetime.datetime.now()\r\n create_age_in_mon=((today_date - create_date).days)/avg_month_time\r\n create_age_in_mon=round(create_age_in_mon)\r\n result.append(create_age_in_mon)\r\n \r\n else:\r\n result.append(-1)\r\n \r\n #calculate expiry age in months\r\n \r\n if(w.expiration_date==None or type(w.expiration_date) is str):\r\n result.append(-1)\r\n else:\r\n if(type(w.expiration_date) is list):\r\n expiry_date=w.expiration_date[-1]\r\n else:\r\n expiry_date=w.expiration_date\r\n if(type(expiry_date) is datetime.datetime):\r\n today_date=datetime.datetime.now()\r\n expiry_age_in_mon=((expiry_date - today_date).days)/avg_month_time\r\n expiry_age_in_mon=round(expiry_age_in_mon)\r\n\r\n # appending in months Appended to the Vector\r\n result.append(expiry_age_in_mon)\r\n else:\r\n # expiry date error so append -1\r\n result.append(-1)\r\n\r\n #find the age of last update\r\n \r\n if(w.updated_date==None or type(w.updated_date) is str):\r\n result.append(-1)\r\n else:\r\n if(type(w.updated_date) is list):\r\n update_date=w.updated_date[-1]\r\n else:\r\n update_date=w.updated_date\r\n if(type(update_date) is datetime.datetime):\r\n today_date=datetime.datetime.now()\r\n update_age_in_days=((today_date - update_date).days)\r\n result.append(update_age_in_days)\r\n # appending updated age in days\r\n else:\r\n result.append(-1)\r\n\r\n \r\n #find the country that is hosting this domain\r\n if(w.country == None):\r\n result.append(\"None\")\r\n else:\r\n if isinstance(w.country,str):\r\n result.append(w['country'])\r\n else:\r\n result.append(w['country'][0])\r\n if get_ext(path.path) == '':\r\n result.append(\"None\")\r\n else:\r\n result.append(get_ext(path.path))\r\n result.append(str(label))\r\n return result", "def features(self):\r\n dict_data = []\r\n my_dict = {\r\n \":IODisc\":\"\",\r\n \"Group\": \"$System\",\r\n \"Comment\": \"\",\r\n \"Logged\": \"No\",\r\n \"EventLogged\": \"No\",\r\n \"EventLoggingPriority\": 0,\r\n \"RetentiveValue\": \"No\",\r\n \"InitialDisc\": \"Off\",\r\n \"OffMsg\": \"\",\r\n \"OnMsg\": \"\",\r\n \"AlarmState\": \"None\",\r\n \"AlarmPri\": 1,\r\n \"DConversion\": \"Direct\",\r\n \"AccessName\": \"HC\",\r\n \"ItemUseTagname\": \"No\",\r\n \"ItemName\": \"\",\r\n \"ReadOnly\": \"No\",\r\n \"AlarmComment\": \"\",\r\n \"AlarmAckModel\": 0,\r\n \"DSCAlarmDisable\": 0,\r\n \"DSCAlarmInhibitor\": \"\",\r\n \"SymbolicName\": \"\"\r\n }\r\n\r\n dict_data.append(my_dict)\r\n\r\n return(my_dict)", "def features(self) -> Optional[pulumi.Input['DevToolPortalFeatureSettingsArgs']]:\n return pulumi.get(self, \"features\")", "def capabilities(self):\n return []", "def _features_of(entry: _LexiconEntry) -> str:\n return entry[\"features\"]", "def get_all_features(self) :\n raise NotImplementedError", "def get_features_from_feature_server(url, query):\n\n logger.debug('url received: ' + url + ', query received: ' + query)\n\n features = []\n f = FeatureLayer(url = url)\n feature_set = f.query(where = query)\n for feature in feature_set:\n features.append(feature.as_dict)\n return features", "def test_support_SAFELIST(self):\n 
self.assertEqual(self._parseFeature(\"SAFELIST\"), True)", "def features(self):\n\n return self._features", "def create_tls_context(TLSSTRENGTH):\n\n #CREATE a CONTEXT that we can then update\n context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS)\n\n if TLSSTRENGTH == \"tls1_3\":\n context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLSv1_3)\n\n if TLSSTRENGTH == \"tls1_2\":\n context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLSv1_2)\n\n elif TLSSTRENGTH == \"tls1_1\":\n context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLSv1_1)\n\n elif TLSSTRENGTH == \"tls1\":\n context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLSv1)\n\n else:\n print(\"Valid TLS Protocol Not Found: Needs to be in OpenSSL format: tls_1, tls_1_1 tls_2\")\n return\n\n context.verify_mode = ssl.CERT_REQUIRED\n context.check_hostname = True\n context.load_default_certs()\n print(\"TLS Protocol Specified: {}\".format(TLSSTRENGTH))\n return context", "def detect_supported_caps():\n result = []\n # generate list of supported capabilities\n\n # Intel RDT L3 CAT\n if common.PQOS_API.is_l3_cat_supported():\n result.append(common.CAT_L3_CAP)\n\n # Intel RDT L2 CAT\n if common.PQOS_API.is_l2_cat_supported():\n result.append(common.CAT_L2_CAP)\n\n # Intel RDT MBA\n if common.PQOS_API.is_mba_supported():\n result.append(common.MBA_CAP)\n\n if sstbf.is_sstbf_enabled():\n result.append(common.SSTBF_CAP)\n\n if power.is_sstcp_enabled():\n result.append(common.POWER_CAP)\n\n return result", "def get_list_features(feature):\n result = np.array([])\n result = np.append(result,feature.mfcc)\n result = np.append(result,feature.d_mfcc)\n result = np.append(result,feature.lpc)\n result = np.append(result,feature.d_lpc)\n result = np.append(result,feature.zc_rate)\n result = np.append(result,feature.d_zc_rate)\n result = np.append(result,feature.spec_centroid)\n result = np.append(result,feature.d_spec_centroid)\n return result", "def protocol_names(self):\n l = self.protocols()\n retval = [str(k.name) for k in l]\n return retval", "def do_features_request(module_type=None, version=None, software=None):\n\n #  connect to database\n cur_db = connect_db(\"172.20.38.50\", \"mvelay\", \"user\", \"sandbox\")\n cursor = cur_db.cursor()\n\n # build whole query\n cur_query = \"\"\" SELECT feature, supported FROM t_feature\n WHERE module=\"%s\" AND version=\"%s\" AND sw=\"%s\";\"\"\" \\\n % (module_type[0], version[0], software[0])\n\n print cur_query\n cursor.execute(cur_query)\n results = cursor.fetchall()\n cursor.close()\n\n if results:\n results = results[:1000] # Limit to first 1000 results\n else:\n results = None\n\n return module_type[0], version[0], software[0], results", "def convert_xnli_examples_to_features(self):\n features = self.features\n lang_filtered_features = []\n for ex_index, example in enumerate(self.examples):\n language = example.guid.split('-')[1]\n if language in self.lang_list:\n lang_filtered_features.append(features[ex_index] + [language])\n return lang_filtered_features", "def guest_os_features(self) -> Sequence['outputs.GuestOsFeatureResponse']:\n return pulumi.get(self, \"guest_os_features\")", "def specialFeatures(self):\r\n return self._specialFeatures", "def org_apache_felix_https_jetty_protocols_included(self) -> ConfigNodePropertyArray:\n return self._org_apache_felix_https_jetty_protocols_included", "def supported_features(self):\n return SUPPORT_LGSMARTTV", "def test_result(self):\n connection = Connection(Context(SSLv23_METHOD), None)\n ciphers = connection.get_cipher_list()\n assert isinstance(ciphers, list)\n for cipher in 
ciphers:\n assert isinstance(cipher, str)", "def get_all_cipher():\n return OpenSSL.cipher_algo.keys()", "def get_feature_labels(self):\n\t\tfeature_labels = []\n\t\tfor feature, i in zip(self.feature_names,self.feature_mask):\n\t\t\tif i == True:\n\t\t\t\tfeature_labels.append(feature)\n\t\treturn feature_labels", "def get_all_ciphers(method):\n ssl_method = getattr(SSL, method.replace('.', '_') + '_METHOD')\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n try:\n context = SSL.Context(ssl_method)\n context.set_cipher_list(\"ALL:COMPLEMENTOFALL\")\n sock = SSL.Connection(context, sock)\n ciphers = sock.get_cipher_list()\n except SSL.Error:\n ciphers = []\n finally:\n sock.close()\n\n return ciphers", "def featureNames(self):\n return [feature.name for feature in self.features]", "def _extract_features(self):\n # print(os.getpid())\n return {n:self._extract_feature(f) for (n,f) in self.features.items()}", "def get_tls_factory(self):\n if not access(self.cert_path, R_OK):\n raise RuntimeError('Error: cert file at %s is not '\n 'readable' % self.cert_path)\n if not access(self.key_path, R_OK):\n raise RuntimeError('Error: key file at %s is not '\n 'readable' % self.key_path)\n if not HAVE_PYOPENSSL:\n raise RuntimeError('Error: running with TLS (cert and key) requires'\n ' pyOpenSSL, but it does not appear to be '\n 'installed. Please \"pip install pyOpenSSL\".')\n # check certs are readable\n cf = certificateOptionsFromFiles(self.key_path, self.cert_path)\n return cf", "def _supported_versions(self, jarm_details, grease):\n if (jarm_details[7] == \"1.2_SUPPORT\"):\n # TLS 1.3 is not supported.\n tls = [b\"\\x03\\x01\", b\"\\x03\\x02\", b\"\\x03\\x03\"]\n else:\n # TLS 1.3 is supported.\n tls = [b\"\\x03\\x01\", b\"\\x03\\x02\", b\"\\x03\\x03\", b\"\\x03\\x04\"]\n\n # Change supported version order, by default, the versions are from\n # oldest to newest.\n if jarm_details[8] != \"FORWARD\":\n tls = self._cipher_mung(tls, jarm_details[8])\n\n # Assemble the extension.\n ext = b\"\\x00\\x2b\"\n # Add GREASE if applicable.\n if grease:\n versions = self._choose_grease()\n else:\n versions = b\"\"\n\n for version in tls:\n versions += version\n\n second_length = len(versions)\n first_length = second_length+1\n ext += struct.pack(\">H\", first_length)\n ext += struct.pack(\">B\", second_length)\n ext += versions\n\n return ext", "def listentls(self, site):\n logger.warning('Setting TCP TLS listener on port %d for HTTPS requests',\n self.bind_port)\n self.reactor.listenSSL(self.bind_port, site, self.tls_factory)", "def songfeature_get(): # noqa: E501\n query = 'SELECT * FROM SongFeatures'\n results = query_to_dict(query)\n features_list = []\n for r in results:\n features_list.append(\n Songfeature(acousticness= r['Acousticness'],\n danceability= r['Danceability'],\n duration_ms= r['Duration_ms'],\n energy= r['Energy'],\n instrumentalness= r['Instrumentalness'],\n musicalkey= r['MusicalKey'],\n liveness= r['Liveness'],\n loudness= r['Loudness'],\n mode= r['Mode'],\n speechiness= r['Speechiness'],\n tempo= r['Tempo'],\n timesignature= r['Time_signature'],\n valence= r['Valence'],\n songid= r['SongID']))\n return features_list", "def get_features(self, request, **kwargs):\n raise NotImplementedError()", "def get_supported_extensions(ext=\".as\"):\n result = list(ext + x for x in LOADERS.keys())\n result.append(ext)\n return result", "def getFeatureNames(self):\n pass", "def get_protocols(self):\r\n\r\n return None", "def org_apache_felix_https_jetty_ciphersuites_included(self) -> 
ConfigNodePropertyArray:\n return self._org_apache_felix_https_jetty_ciphersuites_included", "def __init__(self):\n super(TLS12AuthenticationSuite, self).__init__()\n self._protocol = ssl.PROTOCOL_TLSv1_2\n self._ciphers = ':'.join((\n 'AES128-SHA256',\n 'AES256-SHA256',\n 'DH-DSS-AES256-SHA256',\n 'DH-DSS-AES128-SHA256',\n 'DH-RSA-AES128-SHA256',\n 'DHE-DSS-AES128-SHA256',\n 'DHE-RSA-AES128-SHA256',\n 'DH-DSS-AES256-SHA256',\n 'DH-RSA-AES256-SHA256',\n 'DHE-DSS-AES256-SHA256',\n 'DHE-RSA-AES256-SHA256',\n 'ECDH-ECDSA-AES128-SHA256',\n 'ECDH-ECDSA-AES256-SHA256',\n 'ECDHE-ECDSA-AES128-SHA256',\n 'ECDHE-ECDSA-AES256-SHA384',\n 'ECDH-RSA-AES128-SHA256',\n 'ECDH-RSA-AES256-SHA384',\n 'ECDHE-RSA-AES128-SHA256',\n 'ECDHE-RSA-AES256-SHA384',\n 'ECDHE-ECDSA-AES128-GCM-SHA256',\n 'ECDHE-ECDSA-AES256-GCM-SHA384',\n 'ECDHE-ECDSA-AES128-SHA256',\n 'ECDHE-ECDSA-AES256-SHA384',\n ))", "def supported_features(self):\n return SUPPORT_FLAGS_HEATER", "def feature_label(features):\n f=[]\n l=[]\n for item in features:\n f.append(item[0])\n l.append(item[1])\n return f,l", "def tls_enabled(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"tls_enabled\")", "def find_features(pyr):\n\n feature_lst = spread_out_corners(pyr[0], 3, 3, 3)\n descriptors = sample_descriptor(pyr[2],feature_lst/4,3)\n return [feature_lst,descriptors]", "def list_opts():\n return [(constants.MLNX_BAREMETAL_DRIVER_GROUP_NAME, DRIVER_OPTS)]" ]
[ "0.6554773", "0.6229621", "0.6155694", "0.6152324", "0.6034768", "0.59933424", "0.5971594", "0.5917329", "0.5915376", "0.5915376", "0.5915376", "0.5915376", "0.5885361", "0.5870308", "0.58026606", "0.57677984", "0.5735279", "0.5718903", "0.57177716", "0.5692858", "0.56664056", "0.5648981", "0.5638508", "0.5638508", "0.5623107", "0.5575124", "0.55494374", "0.5540081", "0.55220354", "0.55200696", "0.55200696", "0.55200696", "0.55200696", "0.55039203", "0.54721004", "0.54610556", "0.54514974", "0.5445717", "0.54392046", "0.54392046", "0.54392046", "0.54339325", "0.5432276", "0.5427764", "0.5417394", "0.5416009", "0.5404898", "0.53946435", "0.5381066", "0.5377042", "0.5372152", "0.5355624", "0.5349212", "0.53448904", "0.53355306", "0.5330959", "0.5329252", "0.53209054", "0.53002787", "0.5290914", "0.5279659", "0.5277736", "0.52330697", "0.5221124", "0.52067405", "0.5191504", "0.51688874", "0.5161514", "0.5152268", "0.5149151", "0.5148679", "0.5145661", "0.5140333", "0.5135143", "0.5123583", "0.51195943", "0.5115276", "0.511465", "0.511122", "0.51017463", "0.50847566", "0.5077024", "0.50765765", "0.5074295", "0.50718474", "0.5071037", "0.5069794", "0.50610644", "0.50575876", "0.5054124", "0.50401807", "0.5034724", "0.5020008", "0.50131243", "0.501087", "0.50038844", "0.5001856", "0.4995313", "0.4982146", "0.4980355" ]
0.77832854
0
Add common extensions to Cert or CSR builder.
def install_extensions(self, builder):
    # BasicConstraints, critical
    if self.ca:
        ext = x509.BasicConstraints(ca=True, path_length=self.path_length)
    else:
        ext = x509.BasicConstraints(ca=False, path_length=None)
    builder = builder.add_extension(ext, critical=True)

    # KeyUsage, critical
    ku_args = {k: k in self.usage for k in KU_FIELDS}
    if self.ca:
        ku_args.update(CA_DEFAULTS)
    elif not self.usage:
        ku_args.update(NONCA_DEFAULTS)
    for k in XKU_DEFAULTS:
        if k in self.usage:
            for k2 in XKU_DEFAULTS[k]:
                ku_args[k2] = True
    ext = make_key_usage(**ku_args)
    builder = builder.add_extension(ext, critical=True)

    # ExtendedKeyUsage, critical
    xku = [x for x in self.usage if x not in KU_FIELDS]
    xku_bad = [x for x in xku if x not in XKU_CODE_TO_OID]
    if xku_bad:
        raise InvalidCertificate("Unknown usage keywords: %s" % (','.join(xku_bad),))
    if xku:
        xku_oids = [XKU_CODE_TO_OID[x] for x in xku]
        ext = x509.ExtendedKeyUsage(xku_oids)
        builder = builder.add_extension(ext, critical=True)

    # NameConstraints, critical
    if (self.exclude_subtrees or self.permit_subtrees) and self.ca:
        allow = self.load_gnames(self.permit_subtrees) or None
        disallow = self.load_gnames(self.exclude_subtrees) or None
        ext = x509.NameConstraints(allow, disallow)
        builder = builder.add_extension(ext, critical=True)

    # SubjectAlternativeName
    if self.san:
        ext = x509.SubjectAlternativeName(self.get_san_gnames())
        builder = builder.add_extension(ext, critical=False)

    # CRLDistributionPoints
    if self.crl_urls:
        full_names = self.get_crl_gnames()
        reasons = None
        crl_issuer = None
        point = x509.DistributionPoint(full_names, None, reasons, crl_issuer)
        ext = x509.CRLDistributionPoints([point])
        builder = builder.add_extension(ext, critical=False)

    # AuthorityInformationAccess
    if self.ocsp_urls or self.issuer_urls:
        oid = AuthorityInformationAccessOID.OCSP
        ocsp_list = [x509.AccessDescription(oid, gn) for gn in self.get_ocsp_gnames()]
        oid = AuthorityInformationAccessOID.CA_ISSUERS
        ca_list = [x509.AccessDescription(oid, gn) for gn in self.get_issuer_urls_gnames()]
        ext = x509.AuthorityInformationAccess(ocsp_list + ca_list)
        builder = builder.add_extension(ext, critical=False)

    # OCSPNoCheck
    if self.ocsp_nocheck:
        ext = x509.OCSPNoCheck()
        builder = builder.add_extension(ext, critical=False)

    # TLSFeature: status_request, status_request_v2
    tls_features = self.get_tls_features()
    if tls_features:
        ext = x509.TLSFeature(tls_features)
        builder = builder.add_extension(ext, critical=False)

    # configured builder
    return builder
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_extensions(self, ext_stack):\n return m2.x509_req_add_extensions(self.req, ext_stack._ptr())", "def build_extension(self, ext):\n build_all()\n super(cffiBuilder, self).build_extension(ext)", "def build_extensions(self):\n c = self.compiler.compiler_type\n CF = [] ; LF=[]\n if \"CFLAGS\" in os.environ:\n CF = os.environ.get(\"CFLAGS\").split(\" \")\n if \"LDFLAGS\" in os.environ:\n LF = os.environ.get(\"LDFLAGS\").split(\" \")\n for e in self.extensions:\n if c in copt:\n e.extra_compile_args = copt[ c ] + CF\n e.extra_link_args = lopt[ c ] + LF\n print(\"Customised compiler\",c,e.extra_compile_args,\n e.extra_link_args)\n build_ext.build_ext.build_extensions(self)", "def add_ext(self, ext):\n assert m2.x509_type_check(self.x509), \"'x509' type error\"\n return m2.x509_add_ext(self.x509, ext.x509_ext, -1)", "def build_extensions(self):\n # TODO: move build customization here?\n build_ext.build_extensions(self)", "def add_extension(self, new_ext):\n if not isinstance(new_ext, extension.X509Extension):\n raise errors.X509Error(\"ext is not an anchor X509Extension\")\n attributes = self.get_attributes()\n ext_attrs = [a for a in attributes\n if a['attrType'] == OID_extensionRequest]\n if not ext_attrs:\n new_attr_index = len(attributes)\n attributes[new_attr_index] = None\n ext_attr = attributes[new_attr_index]\n ext_attr['attrType'] = OID_extensionRequest\n ext_attr['attrValues'] = None\n exts = rfc5280.Extensions()\n else:\n ext_attr = ext_attrs[0]\n exts = decoder.decode(ext_attr['attrValues'][0].asOctets(),\n asn1Spec=rfc5280.Extensions())[0]\n\n # the end is the default position\n new_ext_index = len(exts)\n # unless there's an existing extension with the same OID\n for i, ext_i in enumerate(exts):\n if ext_i['extnID'] == new_ext.get_oid():\n new_ext_index = i\n break\n\n exts[new_ext_index] = new_ext._ext\n\n ext_attr['attrValues'][0] = encoder.encode(exts)", "def _init_misc_extensions(self):\n self.certificateTypes = list(CERTIFICATE_TYPES)\n self.useExperimentalTackExtension = False\n self.sendFallbackSCSV = False\n self.useEncryptThenMAC = True\n self.ecdsaSigHashes = list(ECDSA_SIGNATURE_HASHES)\n self.more_sig_schemes = list(SIGNATURE_SCHEMES)\n self.usePaddingExtension = True\n self.useExtendedMasterSecret = True\n self.requireExtendedMasterSecret = False\n # PSKs\n self.pskConfigs = []\n self.psk_modes = list(PSK_MODES)\n # session tickets\n self.ticketKeys = []\n self.ticketCipher = \"aes256gcm\"\n self.ticketLifetime = 24 * 60 * 60\n self.max_early_data = 2 ** 14 + 16 # full record + tag\n # send two tickets so that client can quickly ramp up number of\n # resumed connections (as tickets are single-use in TLS 1.3\n self.ticket_count = 2\n self.record_size_limit = 2**14 + 1 # TLS 1.3 includes content type", "def build_extensions(self):\n numpy_incl = resource_filename('numpy', 'core/include')\n for ext in self.extensions:\n ext.include_dirs.append(numpy_incl)\n\n # This explicitly calls the superclass method rather than the\n # usual super() invocation because distutils' build_class, of\n # which Cython's build_ext is a subclass, is an old-style class\n # in Python 2, which doesn't support `super`.\n cython_build_ext.build_extensions(self)", "def _extend_with_prefix(base, extensions, prefix):\n for key, value in extensions.items():\n base[prefix + key] = value", "def _set_extensions(self):\n\n self._critical_extensions = set()\n\n for extension in self['tbs_cert_list']['crl_extensions']:\n name = extension['extn_id'].native\n attribute_name = '_%s_value' % name\n if 
hasattr(self, attribute_name):\n setattr(self, attribute_name, extension['extn_value'].parsed)\n if extension['critical'].native:\n self._critical_extensions.add(name)\n\n self._processed_extensions = True", "def extend(self, extension):\n for element in extension:\n self.append(element)", "def add_extensions_to_browser(self, browser_capabilities, config_section):\n\n browser_name = self.shishito_support.get_opt(config_section, 'browser').lower()\n extensions = self.get_browser_extensions(config_section)\n\n if extensions:\n try:\n options_kw = BROWSER_KEYWORDS[browser_name][OPTIONS]\n exts_kw = BROWSER_KEYWORDS[browser_name][EXTENSIONS]\n browser_capabilities.setdefault(options_kw, {}).setdefault(exts_kw, [])\n except:\n return\n\n for extension in extensions:\n with open(extension, 'rb') as ext_file:\n extension_base64 = base64.b64encode(ext_file.read()).decode('UTF-8')\n browser_capabilities[options_kw][exts_kw].append(extension_base64)", "def add(self, raw_ext):\n if raw_ext is None:\n return\n if not isinstance(raw_ext, Extension):\n raw_ext = Extension(raw_ext.strip())\n if raw_ext.name == \"\":\n return\n safe_ext = raw_ext\n depname = self.make_depname(safe_ext.name)\n tczname = self.make_tczname(safe_ext.name)\n if tczname in self.excluded_extensions:\n return\n if tczname in self:\n # Copy the onboot and copy2fs flags if set.\n # Make sure not to overwrite a True with a False.\n if safe_ext.onboot:\n self.extensions[tczname].onboot = True\n if safe_ext.copy2fs:\n self.extensions[tczname].copy2fs = True\n # set implicit False if any copy is False\n self.extensions[tczname].implicit &= safe_ext.implicit\n # Combine alt_names list if needed\n for name in safe_ext.alt_names:\n if name in self.extensions[tczname].alt_names:\n continue\n self.extensions[tczname].alt_names.append(name)\n return\n safe_ext.name = tczname\n self.extensions[tczname] = safe_ext\n self.extension_depnames.add(depname)", "def new_cert(self, commonname, extensions=None):\n\n serial = self._get_serial()\n pkey = self._create_pkey(commonname, serial)\n self._create_cert(pkey, commonname, serial, extensions)", "def _set_extensions(self):\n\n self._critical_extensions = set()\n\n for extension in self['crl_entry_extensions']:\n name = extension['extn_id'].native\n attribute_name = '_%s_value' % name\n if hasattr(self, attribute_name):\n setattr(self, attribute_name, extension['extn_value'].parsed)\n if extension['critical'].native:\n self._critical_extensions.add(name)\n\n self._processed_extensions = True", "def extensions():\n\n pass", "def inject_extensions(self, extensions: Dict[str, str]) -> None:\n self.extensions = extensions", "def define_extensions(use_cython, use_openmp):\n if sys.platform.startswith('win'):\n # compile args from\n # https://msdn.microsoft.com/en-us/library/fwkeyyhe.aspx\n link_args = []\n compile_args = ['/O2', '/openmp']\n else:\n link_args = []\n compile_args = ['-Wno-unused-function', '-Wno-maybe-uninitialized', '-O3', '-ffast-math']\n if use_openmp:\n compile_args.append('-fopenmp')\n link_args.append('-fopenmp')\n\n if 'anaconda' not in sys.version.lower():\n compile_args.append('-march=native')\n\n # recommended approach is that the user can choose not to\n # compile the code using cython, they can instead just use\n # the .c file that's also distributed\n # http://cython.readthedocs.io/en/latest/src/reference/compilation.html#distributing-cython-modules\n src_ext = '.pyx' if use_cython else '.c'\n names = ['pairwise3']\n modules = [Extension(name,\n [os.path.join(name + 
src_ext)],\n extra_compile_args = compile_args,\n extra_link_args = link_args) for name in names]\n\n if use_cython:\n return cythonize(modules)\n else:\n return modules", "def detectExtensions(builder):\n print (\"Checking if C extensions can be compiled, don't be alarmed if \"\n \"a few compile errors are printed.\")\n\n if not builder._compile_helper(\"#define X 1\\n\"):\n print \"Compiler not found, skipping C extensions.\"\n return []\n\n # Extension modules to build.\n exts = [\n Extension(\"twisted.spread.cBanana\",\n [\"twisted/spread/cBanana.c\"],\n define_macros=builder.define_macros),\n ]\n\n # urllib.unquote accelerator\n exts.append( Extension(\"twisted.protocols._c_urlarg\",\n [\"twisted/protocols/_c_urlarg.c\"],\n define_macros=builder.define_macros) )\n\n if sys.platform == 'darwin':\n exts.append(\n Extension(\"twisted.internet.cfsupport\",\n [\"twisted/internet/cfsupport/cfsupport.c\"],\n extra_compile_args=['-w'],\n extra_link_args=['-framework','CoreFoundation',\n '-framework','CoreServices',\n '-framework','Carbon'],\n define_macros=builder.define_macros))\n\n if sys.platform == 'win32':\n exts.append( Extension(\"twisted.internet.iocpreactor._iocp\",\n [\"twisted/internet/iocpreactor/_iocp.c\"],\n libraries=[\"ws2_32\", \"mswsock\"],\n define_macros=builder.define_macros))\n\n return exts", "def cythonize_extensions(extensions):\n from Cython.Build import cythonize\n with cd(config.script_dir/'src'):\n cythonized = cythonize(\n extensions,\n language_level=3,\n nthreads=4,\n annotate=config.debug,\n # https://cython.readthedocs.io/en/latest/src/userguide/source_files_and_compilation.html#compiler-directives # noqa: E501\n compiler_directives={\n 'binding': True,\n 'boundscheck': False,\n 'wraparound': False,\n 'profile': config.debug and not config.pypy,\n 'linetrace': config.debug and not config.pypy,\n 'always_allow_keywords': True,\n 'embedsignature': True,\n 'emit_code_comments': True,\n 'initializedcheck': False,\n 'nonecheck': False,\n 'optimize.use_switch': True,\n # Warns about any variables that are implicitly declared\n # without a cdef declaration\n 'warn.undeclared': False,\n 'warn.unreachable': True,\n 'warn.maybe_uninitialized': False,\n 'warn.unused': True,\n 'warn.unused_arg': False,\n 'warn.unused_result': False,\n 'warn.multiple_declarators': True,\n },\n )\n for cy in cythonized:\n cy.sources[0] = 'src/' + cy.sources[0]\n return cythonized", "def add_custom_extension_structure(struct, opts):\n custom_extension_file_content = templates.extension(opts)\n filename = \"{}.py\".format(EXTENSION_FILE_NAME)\n path = [opts[\"project\"], \"src\", opts[\"package\"], filename]\n struct = helpers.ensure(struct, path,\n custom_extension_file_content,\n helpers.NO_OVERWRITE)\n\n return struct, opts", "def build_from_c_and_cpp_files(extensions):\n for extension in extensions:\n sources = []\n for sfile in extension.sources:\n path, ext = os.path.splitext(sfile)\n if ext in ('.pyx', '.py'):\n if extension.language == 'c++':\n ext = '.cpp'\n else:\n ext = '.c'\n sfile = path + ext\n sources.append(sfile)\n extension.sources = sources", "def extensions(cls):\n raise NotImplementedError", "def _extend_instruction(self, orig, extension):\n # keys that are turned into arrays & extended\n for ex_key in ['extends', 'then']:\n # Nothing to extend, skip out the pop at end\n if ex_key not in extension:\n continue\n # We can just copy it over\n elif ex_key not in orig:\n orig[ex_key] = extension[ex_key]\n else:\n # Wrap the original value in a list\n if not 
isinstance(orig[ex_key], list):\n orig[ex_key] = [orig[ex_key]]\n\n # Insert values at beginning if extension is also list, append otherwise.\n if isinstance(extension[ex_key], list):\n for i, v in enumerate(extension[ex_key]):\n orig[ex_key].insert(i, v)\n else:\n orig[ex_key].insert(0, extension[ex_key])\n\n # Clear out key for update at end\n extension.pop(ex_key)\n\n # keys that are updated\n for up_key in ['cookies', 'headers', 'posts']:\n # Nothing to update, skip out pop at end\n if up_key not in extension:\n continue\n # We can just copy it over\n elif up_key not in orig:\n orig[up_key] = extension[up_key]\n # If they're both dicts, then we update. If not, then a replace\n # will happen.\n else:\n orig_val = orig[up_key]\n up_val = extension[up_key]\n # Prefer orig_val\n if isinstance(orig_val, dict) and isinstance(up_val, dict):\n up_val.update(orig_val)\n orig[up_key] = up_val\n # Keep things available for total replacement.\n else:\n continue\n\n # Clear out key for update at end\n extension.pop(up_key)\n\n # everything else is replaced.\n orig.update(extension)", "def addExtension(self, ext):\n self.files[ext] = SourceFile(self.path, self.name + ext)", "def extensions(self):\n raise NotImplementedError()", "def new_extension(name, value, critical=0, _pyfree=1):\n if name == 'subjectKeyIdentifier' and \\\n value.strip('0123456789abcdefABCDEF:') is not '':\n raise ValueError('value must be precomputed hash')\n lhash = m2.x509v3_lhash()\n ctx = m2.x509v3_set_conf_lhash(lhash)\n x509_ext_ptr = m2.x509v3_ext_conf(lhash, ctx, name, value)\n x509_ext = X509_Extension(x509_ext_ptr, _pyfree)\n x509_ext.set_critical(critical)\n return x509_ext", "def define_extensions():\n import numpy as np\n build_dir = os.environ.get(\"PWD\")\n extra_compile_args=[\n '-O3',\n '-Wall',\n '-Wextra',\n '-Wno-unused-variable',\n '-D CYTHON_TRACE=1' if config.debug else '',\n '-D CYTHON_TRACE_NOGIL=1' if config.debug else '',\n ]\n extra_link_args = ['-Wl,-rpath,' + build_dir + '/src/qtestpy/lib']\n return cythonize_extensions([\n Extension(\n name='qtestpy.adapt',\n sources=['qtestpy/adapt.pyx'],\n include_dirs=['src/qtestpy', 'src/qtestpy/include', np.get_include()],\n library_dirs=['src/qtestpy/lib'],\n libraries=[':e.o'],\n extra_compile_args=[\n *extra_compile_args,\n # https://github.com/cython/cython/issues/2498\n '-D NPY_NO_DEPRECATED_API=NPY_1_7_API_VERSION',\n # type punning is used to support GUIDs\n '-Wno-strict-aliasing',\n ],\n extra_link_args = extra_link_args,\n ),\n ])", "def add_extension(self, extension: t.Union[str, t.Type[\"Extension\"]]) -> None:\n self.extensions.update(load_extensions(self, [extension]))", "def extensions():\n exts = []\n exts.append(\n Extension(\n 'pytng.pytng',\n sources=glob('pytng/src/compression/*.c') + glob(\n 'pytng/src/lib/*.c') + ['pytng/pytng.pyx'],\n include_dirs=[\n \"pytng/include/\", \"{}/include\".format(sys.prefix),\n np.get_include()\n ],\n library_dirs=[\"{}/lib\".format(sys.prefix)],\n libraries=['z'], ))\n\n return cythonize(exts, gdb_debug=False)", "def _copy_extension_settings(self, other):\n other.useExtendedMasterSecret = self.useExtendedMasterSecret\n other.requireExtendedMasterSecret = self.requireExtendedMasterSecret\n other.useExperimentalTackExtension = self.useExperimentalTackExtension\n other.sendFallbackSCSV = self.sendFallbackSCSV\n other.useEncryptThenMAC = self.useEncryptThenMAC\n other.usePaddingExtension = self.usePaddingExtension\n # session tickets\n other.padding_cb = self.padding_cb\n other.ticketKeys = self.ticketKeys\n 
other.ticketCipher = self.ticketCipher\n other.ticketLifetime = self.ticketLifetime\n other.max_early_data = self.max_early_data\n other.ticket_count = self.ticket_count\n other.record_size_limit = self.record_size_limit", "def _add_extensions(self):\n ext_cache_down = 'cache_downloading'\n ext_cache_up = 'cache_uploading'\n cmd_args = self.task_data.get('cmd_args', {})\n if not isinstance(cmd_args, dict):\n cmd_args = {}\n if cmd_args.get('save_raw_pages', False):\n self.required_signals[SIGNAL_SPIDER_OPENED]['wait'] += \\\n EXTENSION_SIGNALS[ext_cache_up]\n if cmd_args.get('load_raw_pages'):\n self.required_signals[SIGNAL_SCRIPT_CLOSED]['wait'] += \\\n EXTENSION_SIGNALS[ext_cache_down]", "def _sanityCheckExtensions(other):\n if other.useEncryptThenMAC not in (True, False):\n raise ValueError(\"useEncryptThenMAC can only be True or False\")\n\n if other.usePaddingExtension not in (True, False):\n raise ValueError(\"usePaddingExtension must be True or False\")\n\n if other.use_heartbeat_extension not in (True, False):\n raise ValueError(\"use_heartbeat_extension must be True or False\")\n\n if other.heartbeat_response_callback and not other.use_heartbeat_extension:\n raise ValueError(\"heartbeat_response_callback requires \"\n \"use_heartbeat_extension\")\n\n if other.record_size_limit is not None and \\\n not 64 <= other.record_size_limit <= 2**14 + 1:\n raise ValueError(\"record_size_limit cannot exceed 2**14+1 bytes\")\n\n HandshakeSettings._sanityCheckEMSExtension(other)", "def build_extension(self, ext):\n if sys.platform == \"win32\":\n _clr_compiler = \"C:\\\\Windows\\\\Microsoft.NET\\\\Framework\\\\v4.0.30319\\\\csc.exe\"\n else:\n _clr_compiler = \"mcs\"\n cmd = [ \n _clr_compiler,\n \"/target:library\",\n \"clrmagic.cs\"\n ]\n check_call(\" \".join(cmd), shell=True)", "def set_extension(self, name, value, allow_deprecated=False):\n\n extension = x509.Extension({\n 'extn_id': name\n })\n # We use native here to convert OIDs to meaningful names\n name = extension['extn_id'].native\n\n if name in self._deprecated_extensions and not allow_deprecated:\n raise ValueError(_pretty_message(\n '''\n An extension of the type %s was added, however it is\n deprecated. 
Please add the parameter allow_deprecated=True to\n the method call.\n ''',\n name\n ))\n\n spec = extension.spec('extn_value')\n\n if not isinstance(value, spec) and value is not None:\n raise TypeError(_pretty_message(\n '''\n value must be an instance of %s, not %s\n ''',\n _type_name(spec),\n _type_name(value)\n ))\n\n if name in self._special_extensions:\n setattr(self, '_%s' % name, value)\n else:\n if value is None:\n if name in self._other_extensions:\n del self._other_extensions[name]\n else:\n self._other_extensions[name] = value", "def registerExtensions(self, extensions, configs):\n for ext in extensions:\n try:\n if isinstance(ext, util.string_type):\n ext = self.build_extension(ext, configs.get(ext, []))\n if isinstance(ext, Extension):\n ext.extendMarkdown(self, globals())\n elif ext is not None:\n raise TypeError(\n 'Extension \"%s.%s\" must be of type: \"markdown.Extension\"'\n % (ext.__class__.__module__, ext.__class__.__name__))\n except:\n print(str(traceback.format_exc()))\n continue\n\n return self", "def extend(source, add_attribute):\n\n ExtendCommandExecutor().extend(source, add_attribute)", "def test_get_built_in_extension(self):\n\n spec = {\n '$ext': {\n \"function\": \"random_string\",\n \"extra_args\": [4]\n }\n }\n\n validate_extensions(spec, None, None)", "def _convert_ext_attrs(self, ast):\n self.ext_attrs = IDLExtAttrs(ast)", "def addExtension(self, *args):\n return _libsbml.SBMLExtensionRegistry_addExtension(self, *args)", "def extensions(self) -> Tuple[str, ...]:\n raise NotImplementedError", "def addExtension(*args, nodeType: Union[AnyStr, bool]=\"\", attributeType: Union[AnyStr, bool]=\"\",\n binaryTag: Union[AnyStr, bool]=\"\", cachedInternally: bool=True, category:\n Union[AnyStr, List[AnyStr], bool]=\"\", dataType: Union[AnyStr, List[AnyStr],\n bool]=\"\", defaultValue: Union[float, bool]=0.0, disconnectBehaviour: Union[int,\n bool]=0, enumName: Union[AnyStr, bool]=\"\", exists: bool=True, fromPlugin:\n bool=True, hasMaxValue: bool=True, hasMinValue: bool=True, hasSoftMaxValue:\n bool=True, hasSoftMinValue: bool=True, hidden: bool=True, indexMatters:\n bool=True, internalSet: bool=True, keyable: bool=True, longName: Union[AnyStr,\n bool]=\"\", maxValue: Union[float, bool]=0.0, minValue: Union[float, bool]=0.0,\n multi: bool=True, niceName: Union[AnyStr, bool]=\"\", numberOfChildren:\n Union[int, bool]=0, parent: Union[AnyStr, bool]=\"\", proxy: Union[AnyStr,\n bool]=\"\", readable: bool=True, shortName: Union[AnyStr, bool]=\"\",\n softMaxValue: Union[float, bool]=0.0, softMinValue: Union[float, bool]=0.0,\n storable: bool=True, usedAsColor: bool=True, usedAsFilename: bool=True,\n usedAsProxy: bool=True, writable: bool=True, q=True, query=True, e=True,\n edit=True, **kwargs)->Union[None, Any]:\n pass", "def extensions(self, extensions):\n\n self._extensions = extensions", "def _add_info(self, msg, **kwargs):\n\n args, extensions = self._filter_args(msg, **kwargs)\n for key, val in args.items():\n setattr(msg, key, val)\n\n if extensions:\n if msg.extension_elements:\n msg.extension_elements.extend(extensions)\n else:\n msg.extension_elements = extensions", "def add_content_types():\n for ext in EXTRA_TYPES:\n mimetypes.add_type(EXTRA_TYPES[ext], ext)", "def add_extra_classes():\n\n for wire_version in [of_g.VERSION_1_2, of_g.VERSION_1_3]:\n version_name = of_g.of_version_wire2name[wire_version]\n oxm.add_oxm_classes_1_2(versions[version_name]['classes'], wire_version)", "def test_get_extension(self):\n\n spec = {\n \"$ext\": {\n \"function\": 
\"operator:add\",\n }\n }\n\n validate_extensions(spec, None, None)", "def register_extensions(app):\n grpc_channel = grpc.insecure_channel(\n f\"{app.config['GRPC_SERVICE']}:{app.config['GRPC_PORT']}\",\n )\n grpc_client = GrpcClient(grpc_channel)\n grpc_client.init_app(app)", "def extend(doc):\n # Escape if extending null documents.\n if doc is None:\n return\n\n # Verify that document type is supported.\n if type(doc) not in _TYPES:\n rt.throw(\"Unsupported document type: {0}.\".format(type(doc)))\n\n # Initialize document extension information.\n doc.ext = DocumentExtensionInfo()\n\n # Instantiate extension context.\n ctx = _ExtensionContextInfo(doc, doc.meta, doc.ext)\n\n # Step 1: invoke default pre-extenders.\n for extender in default.PRE_EXTENDERS:\n extender(ctx)\n\n # Step 2: invoke type specific extenders.\n if is_extendable(doc):\n for extender in SUPPORTED[doc.type_key.lower()].EXTENDERS:\n extender(ctx)\n\n # Step 3: invoke default post-extenders.\n for extender in default.POST_EXTENDERS:\n extender(ctx)\n\n return doc", "def extra_options():\n extra_vars = {\n 'PrgEnv': [None, 'PrgEnv module to load, e.g., cray to load PrgEnv-cray, or None for automatic determination', CUSTOM],\n 'PrgEnv_load': [True, 'Load the PrgEnv module (if True) or just set the corresponding environment variable (if False)', CUSTOM],\n 'PrgEnv_family': [None, 'Declare to be a member of the PrgEnv family (if \\'PrgEnv\\), of the cpeToolchain family (if \\'cpeToolchain\\') or manually unload all known PrgEnv and cpe* modules (if None, needed when LMOD is not used)', CUSTOM],\n 'CPE_compiler': [None, 'Versionless compiler module to load, or None for automatic determination', CUSTOM],\n 'CPE_version': [None, 'Version of the CPE, if different from the version of the module', CUSTOM],\n 'CPE_load': [ 'first', 'First load the cpe module (if \\'first\\'), after the PrgEnv module (if \\'after\\'), load it at the end (if \\'last\\'), or do not load the cpe module (if None)', CUSTOM],\n 'cray_targets': [[], 'Targetting modules to load', CUSTOM],\n #'optional_example_param': [None, \"Example optional custom parameter\", CUSTOM],\n }\n return Bundle.extra_options(extra_vars)", "def register_extensions(app):\n\n extensions.db.init_app(app)\n extensions.jwt.init_app(app)", "def init_extensions(self, package, module):\n\n pass", "def add_extension(self, extension):\n if self.extensions is None:\n self.extensions = set()\n\n self.extensions.add(extension)", "def create_all_database_extensions(self) -> str:\n unique_databases = set(self._get_all_databases())\n for database in unique_databases:\n # load any pg extensions that are required\n db_conn = self.get_connection(database_override=database)\n for ext in self.extensions:\n statement = f'create extension if not exists \\\"{ext}\\\"'\n try:\n db_conn.execute(statement)\n except sqlalchemy.exc.IntegrityError as error:\n logger.error(\n 'Duplicate extension creation of %s caused an error:\\n%s', ext, error)", "def get_extensions(self, ext_type=None):\n ext_attrs = [a for a in self.get_attributes()\n if a['attrType'] == OID_extensionRequest]\n if len(ext_attrs) == 0:\n return []\n else:\n exts_der = ext_attrs[0]['attrValues'][0].asOctets()\n exts = decoder.decode(exts_der, asn1Spec=rfc5280.Extensions())[0]\n return [extension.construct_extension(e) for e in exts\n if ext_type is None or e['extnID'] == ext_type._oid]", "def test_add_extra_chain_cert_invalid_cert(self):\n context = Context(SSLv23_METHOD)\n with pytest.raises(TypeError):\n 
context.add_extra_chain_cert(object())", "def add_options(self, parser):\n parser.add_argument(\n '--name',\n required=True,\n help='The human-readable name for the extension. This is '\n 'required.')\n parser.add_argument(\n '--class-name',\n default=None,\n help='The class name for the extension (generally in CamelCase '\n 'form, without spaces). If not provided, this will be '\n 'based on the extension name.')\n parser.add_argument(\n '--package-name',\n default=None,\n help='The name of the package (using alphanumeric ). '\n 'If not provided, this will be based on the extension '\n 'name.')\n parser.add_argument(\n '--package-version',\n default='1.0',\n help='The version for your extension and package.')\n parser.add_argument(\n '--summary',\n default=None,\n help='A one-line summary of the extension.')\n parser.add_argument(\n '--description',\n default=None,\n help='A short description of the extension.')\n parser.add_argument(\n '--author-name',\n default=None,\n help='The name of the author for the package and extension '\n 'metadata. This can be a company name.')\n parser.add_argument(\n '--author-email',\n default=None,\n help='The e-mail address of the author for the package and '\n 'extension metadata.')\n parser.add_argument(\n '--enable-configuration',\n action='store_true',\n default=False,\n help='Whether to enable a Configure button and view for the '\n 'extension.')\n parser.add_argument(\n '--enable-static-media',\n action='store_true',\n default=False,\n help='Whether to enable static media files for the package.')", "def register_extensions(app):\n\n app = extensions.create_admin(app)\n extensions.db.init_app(app)\n extensions.jwt.init_app(app)", "def set_extensions_to_default(self):\n if self.cert_type is SSHCertificateType.USER:\n self.extensions = {'permit-X11-forwarding',\n 'permit-agent-forwarding',\n 'permit-port-forwarding',\n 'permit-pty', 'permit-user-rc'}\n else:\n # SSHCertificateType.HOST has no applicable extensions.\n self.clear_extensions()", "def extend_path(self, ext):\n ext = str(ext)\n self._path.append(ext)", "def add_static(ext):\n ext = ext.lower()\n\n compiler = StaticCompiler(ext)\n file_list = compiler.get_staticfiles_list()\n\n return render_to_string(\n \"mub/context_%s.html\" % ext,\n {\n \"items\": file_list,\n \"STATIC_URL\": settings.STATIC_URL,\n \"IS_MINIFIED\": compiler.is_minified\n }\n )", "def build_extension(self, ext_name, configs=[]):\n\n # Parse extensions config params (ignore the order)\n configs = dict(configs)\n pos = ext_name.find(\"(\") # find the first \"(\"\n if pos > 0:\n ext_args = ext_name[pos + 1:-1]\n ext_name = ext_name[:pos]\n pairs = [x.split(\"=\") for x in ext_args.split(\",\")]\n configs.update([(x.strip(), y.strip()) for (x, y) in pairs])\n\n # Setup the module name\n module_name = ext_name\n if '.' 
not in ext_name:\n if ST3:\n from .helper import INSTALLED_DIRECTORY\n module_name = '.'.join([INSTALLED_DIRECTORY, 'markdown.extensions', ext_name])\n else:\n module_name = '.'.join(['markdown.extensions', ext_name])\n\n # Try loading the extension first from one place, then another\n try: # New style (markdown.extensons.<extension>)\n if ST3:\n module = importlib.import_module(module_name)\n else:\n module = __import__(module_name, {}, {}, [module_name.rpartition('.')[0]])\n except ImportError:\n module_name_old_style = '_'.join(['mdx', ext_name])\n try: # Old style (mdx_<extension>)\n module = __import__(module_name_old_style)\n except ImportError as e:\n message = \"Failed loading extension '%s' from '%s' or '%s'\" \\\n % (ext_name, module_name, module_name_old_style)\n e.args = (message,) + e.args[1:]\n raise\n\n # If the module is loaded successfully, we expect it to define a\n # function called makeExtension()\n try:\n return module.makeExtension(configs.items())\n except AttributeError as e:\n message = e.args[0]\n message = \"Failed to initiate extension \" \\\n \"'%s': %s\" % (ext_name, message)\n e.args = (message,) + e.args[1:]\n raise", "def _parse_extensions(self):\n for root in self.roots:\n for extensions in root.iter('extensions'):\n for extension in extensions.iter('extension'):\n extension_name = extension.attrib.get('name', '')\n #print(f'Extension: {extension_name}')\n self.extensions.append(extension_name)\n\n extension_apis = extension.attrib.get('supported', '')\n extension_api_list = set(extension_apis.split('|'))\n\n # filter by api\n if 'gl' not in extension_apis:\n continue\n\n for require in extension.iter('require'):\n for enum in require.iter('enum'):\n enum_name = enum.attrib.get('name', '')\n self.enum_list.append(enum_name)\n self.enum_required_by_extension[enum_name].append({\n \"name\": extension_name,\n \"api_list\": extension_api_list})\n for command in require.iter('command'):\n command_name = command.attrib['name']\n self.command_list.append(command_name)\n self.command_required_by_extension[command_name].append({\n \"name\": extension_name,\n \"api_list\": extension_api_list})", "def add_ext_if_needed(fileName, ext):\n ls = fileName.split(\".\")\n if( ( len(ls)==1) or (not (ls[-1] == ext))):\n return fileName + \".\" + ext\n else:\n return fileName", "def calcNetExtension(initA, accel, srA1, srA2, dipFaultA, initB, srB1, srB2, dipFaultB):\n\t# fault A extension\n\tslipA1 = (initA - accel) * srA1\n\t\n\tslipA2 = accel * srA2\n\t\n\tnetSlipA = slipA1 + slipA2\n\t\n\tnetExtensionA = netSlipA * nmp.cos(dipFaultA)\n\t\n\t#fault B extension\n\tslipB1 = (initB - accel) * srB1\n\t\n\tslipB2 = accel * srB2\n\t\n\tnetSlipB = slipB1 + slipB2\n\t\n\tnetExtensionB = netSlipB * nmp.cos(dipFaultB)\t\n\n\t\n\t# total extension\n\t\n\tnetExtension = netExtensionA + netExtensionB\n\t\n\treturn netExtension", "def make_module_extra(self):\n\n txt = super(EB_icc, self).make_module_extra()\n\n txt += \"prepend-path\\t%s\\t\\t%s\\n\" % (self.license_env_var, self.license_file)\n txt += \"prepend-path\\t%s\\t\\t$root/%s\\n\" % ('NLSPATH', 'idb/intel64/locale/%l_%t/%N')\n\n return txt", "def load_from_existing(self, obj):\n self.subject = self.extract_name(obj.subject)\n\n for ext in obj.extensions:\n crit = ext.critical\n extobj = ext.value\n if ext.oid == ExtensionOID.BASIC_CONSTRAINTS:\n if not crit:\n raise InvalidCertificate(\"BASIC_CONSTRAINTS must be critical\")\n self.ca = extobj.ca\n self.path_length = None\n if self.ca:\n self.path_length = extobj.path_length\n 
elif ext.oid == ExtensionOID.KEY_USAGE:\n if not crit:\n raise InvalidCertificate(\"KEY_USAGE must be critical\")\n self.usage += self.extract_key_usage(extobj)\n elif ext.oid == ExtensionOID.SUBJECT_ALTERNATIVE_NAME:\n self.san = self.extract_gnames(extobj)\n elif ext.oid == ExtensionOID.EXTENDED_KEY_USAGE:\n self.usage += self.extract_xkey_usage(extobj)\n elif ext.oid == ExtensionOID.AUTHORITY_INFORMATION_ACCESS:\n for ad in extobj:\n if not isinstance(ad.access_location, x509.UniformResourceIdentifier):\n InvalidCertificate(\"Unsupported access_location: %s\" % (ad.access_location,))\n url = as_unicode(ad.access_location.value)\n\n if ad.access_method == AuthorityInformationAccessOID.CA_ISSUERS:\n self.issuer_urls.append(url)\n elif ad.access_method == AuthorityInformationAccessOID.OCSP:\n self.ocsp_urls.append(url)\n else:\n raise InvalidCertificate(\"Unsupported access_method: %s\" % (ad.access_method,))\n elif ext.oid == ExtensionOID.CRL_DISTRIBUTION_POINTS:\n for dp in extobj:\n if dp.relative_name:\n raise InvalidCertificate(\"DistributionPoint.relative_name not supported\")\n if dp.crl_issuer:\n raise InvalidCertificate(\"DistributionPoint.crl_issuer not supported\")\n if dp.reasons:\n raise InvalidCertificate(\"DistributionPoint.reasons not supported\")\n\n for gn in self.extract_gnames(dp.full_name):\n if gn.startswith('uri:'):\n self.crl_urls.append(gn[4:])\n else:\n raise InvalidCertificate(\"Unsupported DistributionPoint: %s\" % (gn,))\n elif ext.oid == ExtensionOID.NAME_CONSTRAINTS:\n self.permit_subtrees = self.extract_gnames(extobj.permitted_subtrees)\n self.exclude_subtrees = self.extract_gnames(extobj.excluded_subtrees)\n elif ext.oid == ExtensionOID.SUBJECT_KEY_IDENTIFIER:\n pass\n elif ext.oid == ExtensionOID.AUTHORITY_KEY_IDENTIFIER:\n pass\n elif ext.oid == ExtensionOID.OCSP_NO_CHECK:\n self.ocsp_nocheck = True\n elif ext.oid == ExtensionOID.TLS_FEATURE:\n for tls_feature_code in extobj:\n if tls_feature_code == x509.TLSFeatureType.status_request:\n self.ocsp_must_staple = True\n elif tls_feature_code == x509.TLSFeatureType.status_request_v2:\n self.ocsp_must_staple_v2 = True\n else:\n raise InvalidCertificate(\"Unsupported TLSFeature: %r\" % (tls_feature_code,))\n else:\n raise InvalidCertificate(\"Unsupported extension in CSR: %s\" % (ext,))", "def get_extension_funcs():\n raise NotImplementedError()", "def _init_keys(self):\n\n basic_constraints = crypto.X509Extension('basicConstraints'.encode('ascii'), True,\n 'CA:TRUE, pathlen:0'.encode('ascii'))\n serial = self._get_serial()\n pkey = self._create_pkey(self.commonname, serial)\n self._create_cert(pkey, self.commonname, serial, [basic_constraints], expire=30*365)", "def build_extension(self, ext):\r\n if isinstance(ext, InnoSetupExtension):\r\n self.extract_inno_setup(ext)\r\n else:\r\n super(build_ext, self).build_extension(ext)", "def get_request_extensions(self):\n return []", "def add_extLst(src_xml, des_xml, ext_lst, tag_dict):\n inp_root,_ = gen_tree(src_xml)\n out_root, out_tree = gen_tree(des_xml)\n \n for relation in ext_lst:\n \n # if relation in tag_dict.keys():\n # print(\"JJJ: \", relation)\n # print(\"PPP: \", tag_dict[relation])\n for elt in inp_root.findall(relation):\n # print(\"ELE: \", elt.tag)\n out_root.append(elt)\n\n out_tree.write(des_xml, pretty_print=True, xml_declaration=True, encoding='UTF-8', standalone=True)\n return", "def load_extensions(self):\n extension_module_name = f\"{utils.get_project_name()}.cogs\"\n for extension in CONF.LOADED_EXTENSIONS:\n try:\n 
self.load_extension(extension_module_name + \".\" + extension)\n LOG.debug(f\"The extension '{extension.split('.')[0]}' has been successfully loaded\")\n except Exception as e:\n message = f\"Failed to load extension '{extension.split('.')[0]}'\"\n LOG.exception(log.get_log_exception_message(message, e))", "def filter_ext(exts=[]):\n\n def decorator(function):\n\n def wrapper(*args, **kwargs):\n\n files = function(*args, **kwargs)\n return [file for file in files if file.split('.')[-1] in exts]\n\n return wrapper\n\n return decorator", "def register_extensions(app):\n db.init_app(app)\n migrate.init_app(app, db)\n login.init_app(app)\n mail.init_app(app)\n babel.init_app(app)", "def add_extra_compiler_flags(self, op):\n if is_listing(op):\n for ii in op:\n self.add_extra_compiler_flags(ii)\n elif not op in self.__include_directories and not op in self.__definitions:\n self.__compiler_flags_extra += [op]", "def merge_new_overrides():\n # Take the dex config as is:\n new_doc = {'config': copy.deepcopy(DEFINES['dex_config'])}\n # Convert old dex certs.web.secret to https-tls volume/volumeMounts\n mount = {'mountPath': get_httpstls_mount(), 'name': 'https-tls'}\n vol = {'secret': {'secretName': get_httpstls_secret(),\n 'defaultMode': DEFAULT_HTTPSTLS_MODE},\n 'name': 'https-tls'}\n # Take 'extra' volumes and mounts that may exist in old dex\n # This is expected to be the WAD certificate\n volumes = []\n volumeMounts = []\n if 'volumes' in DEFINES:\n volumes = copy.deepcopy(DEFINES['volumes'])\n if 'volumeMounts' in DEFINES:\n volumeMounts = copy.deepcopy(DEFINES['volumeMounts'])\n\n # only add volumes/mounts if 'extra' was specified, or\n # if there was non-default mount\n if volumes or 'tls_secret' in DEFINES:\n volumes.append(vol)\n if volumeMounts or 'dex_https_tlsCert' in DEFINES:\n volumeMounts.append(mount)\n if volumes:\n new_doc['volumes'] = volumes\n if volumeMounts:\n new_doc['volumeMounts'] = volumeMounts\n return new_doc", "def _add_file_extension(file_name: str, extension: str) -> str:\n fname = file_name.strip()\n slice_offset = -1 * (len(extension) + 1)\n if fname[slice_offset:] != f\".{extension}\":\n fname = fname + f\".{extension}\"\n return fname", "def maybe_cythonize_extensions(top_path, config):\n is_release = os.path.exists(os.path.join(top_path, 'PKG-INFO'))\n\n if is_release:\n build_from_c_and_cpp_files(config.ext_modules)\n else:\n message = ('Please install cython with a version >= {0} in order '\n 'to build a scikit-learn development version.').format(\n CYTHON_MIN_VERSION)\n try:\n import Cython\n if LooseVersion(Cython.__version__) < CYTHON_MIN_VERSION:\n message += ' Your version of Cython was {0}.'.format(\n Cython.__version__)\n raise ValueError(message)\n from Cython.Build import cythonize\n except ImportError as exc:\n exc.args += (message,)\n raise\n\n config.ext_modules = cythonize(config.ext_modules)", "def extend(\n hub: pop.hub.Hub,\n subname: str,\n pypath: List[str] or str = None,\n static: List[str] or str = None,\n contracts_pypath: List[str] or str = None,\n contracts_static: List[str] or str = None,\n) -> bool:\n if not hasattr(hub, subname):\n return False\n sub = getattr(hub, subname)\n if pypath:\n sub._pypath.extend(pop.hub.ex_path(pypath))\n if static:\n sub._static.extend(pop.hub.ex_path(static))\n if contracts_pypath:\n sub._contracts_pypath.extend(pop.hub.ex_path(contracts_pypath))\n if contracts_static:\n sub._contracts_static.extend(pop.hub.ex_path(contracts_static))\n sub._prepare()\n return True", "def chip_ext(x, ext):\n return 
os.path.join(chipseq_dir, conf.chipDir, x + ext)", "def from_map(self, inexts, langtempls, newdb):\n for key in inexts:\n if not key.startswith('extension '):\n raise KeyError(\"Unrecognized object type: %s\" % key)\n ext = key[10:]\n inexten = inexts[key]\n self[ext] = Extension(name=ext,\n description=inexten.get('description'),\n owner=inexten.get('owner'),\n schema=inexten['schema'],\n version=inexten.get('version'))\n if self[ext].name in langtempls:\n lang = {'language %s' % self[ext].name: {'_ext': 'e'}}\n newdb.languages.from_map(lang)", "def extensions(cls):\n return ['ma', 'mb']", "def extend(self, extension):\n if not self.regular:\n raise ValueError(\"{} instances must be regular (have bins of \"\n \"equal size and shape) to compute extensions\"\n .format(self.__class__.__name__))\n\n old_edges = self.edges\n new_edges = []\n widths = (numpy.mean(w) for w in self.binwidths)\n for (ext, old_edge, width) in zip(extension, old_edges, widths):\n old_min, old_max = old_edge[(0, -1), ]\n new_start = numpy.arange(old_min - width * ext[0],\n old_min - width * 0.5, width)\n new_end = numpy.arange(old_max + width,\n old_max + width * (ext[1] + 0.5), width)\n new_edge = numpy.concatenate((new_start, old_edge, new_end))\n new_edges.append(new_edge)\n\n # Append remaining unchanged edge arrays\n new_edges += old_edges[len(new_edges):]\n\n return type(self)(new_edges)", "def common_set_options(opt):\n opt.tool_options('compiler_cxx')\n opt.tool_options('compiler_cc')\n opt.tool_options('python')\n \n opt.add_option('--wxpython', action='store_true', default=False, help='Create the wxPython bindings.')\n opt.add_option('--wx-compiler-prefix', action='store', default='vc',\n help='Specify a different compiler prefix (do this if you used COMPILER_PREFIX when building wx itself)')\n opt.add_option('--macosx-version', action='store', default='', help=\"Version of OS X to build for.\")", "def get_supported_extensions(ext=\".as\"):\n result = list(ext + x for x in LOADERS.keys())\n result.append(ext)\n return result", "def install_extensions(ext_root):\n # Make sure the destination folder exists.\n if not isdir(ext_root):\n os.makedirs(ext_root)\n\n # Copy all build results to plotdevice/lib dir\n for extension in glob(\"%s/*/build/lib*\"%DEPS):\n cmd = 'cp -R -p %s/* %s' % (extension, ext_root)\n result = call(cmd, shell=True)\n if result > 0:\n lib_name = dirname(dirname(extension))\n raise OSError(\"Could not copy %s\" % lib_name)\n print()", "def append_before_ext(fullfile: Text, thing_to_append: Text):\n base, ext = os.path.splitext(fullfile)\n return '{}{}{}'.format(base, thing_to_append, ext)", "def create_fileters(*exts):\n ret = []\n for e in exts:\n ret += ['{} (*.{})'.format(*e)]\n return ret", "def load_extensions(self, *exts):\n for ext in exts:\n try:\n self.load_extension(ext)\n logger.info(f\"Successfully loaded cog {ext}.\")\n except Exception:\n logger.error(f\"Failed to load cog: {ext}: {format_exc()}\")\n\n logger.info(\"Cog loading complete.\")", "def addExtension(self, extension_response):\n extension_response.toMessage(self.fields)", "def add_entry_ext(self, dn, attrs, serverctrls=None, clientctrls=None):\n # Convert our dict to nice syntax for the add-function using modlist-module\n if attrs and dn:\n ldif = modlist.addModlist(attrs)\n # Do the actual synchronous add-operation to the ldapserver\n logger.info(\"add entry %s.\" % ldif)\n ldap.CONTROL_POST_READ\n return self._conn.add_ext_s(dn, ldif, serverctrls=serverctrls, clientctrls=clientctrls)", "def extend(cust, prod, 
rent, n):\n\n cust_f, cust_keys = _extend(cust, n)\n prod_f, prod_keys = _extend(prod, n)\n rent_f, _ = _extend(rent, n, keys=(list(cust_keys), list(prod_keys)))\n\n return cust_f, prod_f, rent_f", "def _scan_for_device_extensions_under_code_container(self, container):\n extcoll = collect_extensions_under_code_container(container, UpnpRootDevice)\n for _, extcls in extcoll:\n if hasattr(extcls, \"MANUFACTURER\") and hasattr(extcls, \"MODEL_NUMBER\") and hasattr(extcls, \"MODEL_DESCRIPTION\"):\n extkey = generate_extension_key(getattr(extcls, \"MANUFACTURER\"),\n getattr(extcls, \"MODEL_NUMBER\"), getattr(extcls, \"MODEL_DESCRIPTION\"))\n self._register_root_device(extkey, extcls)\n return", "def extend(self, *args, **kwargs): # real signature unknown\n pass", "def element_extensions(self, site_id, element_id, extension_id, data, tenant_id=None, api_version=\"v2.0\"):\n\n if tenant_id is None and self._parent_class.tenant_id:\n # Pull tenant_id from parent namespace cache.\n tenant_id = self._parent_class.tenant_id\n elif not tenant_id:\n # No value for tenant_id.\n raise TypeError(\"tenant_id is required but not set or cached.\")\n cur_ctlr = self._parent_class.controller\n\n url = str(cur_ctlr) + \"/{}/api/tenants/{}/sites/{}/elements/{}/extensions/{}\".format(api_version,\n tenant_id,\n site_id,\n element_id,\n extension_id)\n\n api_logger.debug(\"URL = %s\", url)\n return self._parent_class.rest_call(url, \"put\", data=data)", "def _generate_src():\n for ext in extensions:\n yield self.src_format[ext](f=\"{}{}\".format(name, ext))", "def add_file_extension(cur_dir, new_ext, sub_dirs=False):\n if sub_dirs:\n for root, dirs, files in os.walk(cur_dir):\n for filename in files:\n oldname = os.path.join(root, filename)\n os.rename(oldname, oldname+new_ext)\n else:\n files = os.listdir(cur_dir)\n for filename in files:\n # print \"Filename:\", filename\n os.rename(cur_dir+filename, cur_dir+filename+new_ext)", "def extra_options():\n extra_vars = {\n 'auto_detect_cpu_features': [True, \"Auto-detect available CPU features, and configure accordingly\", CUSTOM],\n 'with_shared': [True, \"Enable building of shared ELPA libraries\", CUSTOM],\n 'with_single': [True, \"Enable building of single precision ELPA functions\", CUSTOM],\n 'with_generic_kernel': [True, \"Enable building of ELPA generic kernels\", CUSTOM],\n }\n\n for flag in ELPA_CPU_FEATURE_FLAGS:\n if flag == 'sse4_2':\n conf_opt = ['sse', 'sse-assembly']\n elif flag == 'avx512f':\n conf_opt = ['avx512']\n else:\n conf_opt = [flag]\n\n for opt in conf_opt:\n help_msg = \"Configure with --enable-%s (if None, auto-detect support for %s)\" % (opt, flag.upper())\n extra_vars['use_%s' % flag] = [None, help_msg, CUSTOM]\n\n return ConfigureMake.extra_options(extra_vars)", "def add_extend(self, lines, name=''):\n return self._add_scope(lines, '%extend ' + name + '{', '}', indent=None, inline=False)" ]
[ "0.68608624", "0.6400358", "0.63571113", "0.63295335", "0.63138413", "0.6188864", "0.6090052", "0.6024872", "0.59441936", "0.5844638", "0.5754354", "0.5681884", "0.56702286", "0.5670175", "0.56068116", "0.5569463", "0.5534916", "0.5526108", "0.54374963", "0.54269266", "0.5408903", "0.5392813", "0.5390891", "0.5381402", "0.53313565", "0.532421", "0.52844524", "0.5261528", "0.5156773", "0.512231", "0.5119293", "0.5114408", "0.51096976", "0.50925195", "0.50796777", "0.50561094", "0.503634", "0.50341886", "0.5029443", "0.49864185", "0.4969868", "0.49581024", "0.49289253", "0.49230507", "0.4910769", "0.48982653", "0.48919165", "0.48615453", "0.48584828", "0.48466378", "0.48441136", "0.48163113", "0.48161122", "0.4811506", "0.4801395", "0.48013568", "0.47962552", "0.47933784", "0.47924238", "0.4779595", "0.4770148", "0.47606194", "0.4737755", "0.47195432", "0.46969387", "0.4693957", "0.46782714", "0.46665853", "0.46639624", "0.4661481", "0.46593088", "0.46370426", "0.4632543", "0.4628979", "0.46204978", "0.46176627", "0.46157137", "0.46153313", "0.46146676", "0.46143714", "0.46090934", "0.4593594", "0.45862955", "0.4579485", "0.4569763", "0.4568853", "0.45632264", "0.45617172", "0.45499232", "0.45447776", "0.4540488", "0.4539045", "0.45358658", "0.45276183", "0.45247835", "0.45210084", "0.45202598", "0.4516251", "0.4516131", "0.4512913" ]
0.7651184
0
Print out a list field.
def show_list(self, desc, lst, writeln):
    # Nothing to print for an empty field.
    if not lst:
        return
    # Join the escaped values into a single "desc: v1, v2, ..." line.
    val = ', '.join([list_escape(v) for v in lst])
    writeln("%s: %s" % (desc, val))
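A minimal usage sketch for the show_list snippet above. The list_escape helper and the writeln callback are not defined in this row, so the versions below are assumptions for illustration only:

import sys

def list_escape(v):
    # Assumed helper: escape literal commas so joined values stay unambiguous.
    return str(v).replace(",", "\\,")

def writeln(line):
    # Assumed callback: write one line to stdout.
    sys.stdout.write(line + "\n")

# show_list is written as a method, so pass a placeholder for self here.
show_list(None, "names", ["alice", "bob,jr"], writeln)
# prints: names: alice, bob\,jr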
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def print_list(self):\r\n pass", "def print_list(self):\n self.print_avec_separateur(\" \")", "def list_print(self):\n node = self.cur_node # cant point to ll!\n while node:\n print(node.data)\n node = node.next", "def scapy_fields_FieldListField_i2repr(self, pkt, x):\n\treturn repr([self.field.i2repr(pkt, v) for v in x])", "def __str__(self):\n return str(self.list)", "def print_list(self):\n\n current = self.head\n\n while current is not None:\n print current.data\n current = current.next", "def display_fields(self):\r\n\r\n field_text = self.show_fields()\r\n field_text_list = field_text.split(EOL)[0:-1]\r\n\r\n def fld_format (x_temp):\r\n\r\n x_temp = x_temp.split(COLON)[0], x_temp.split(COLON)[1]\r\n\r\n \"\"\"formats output of the list of search results\"\"\"\r\n\r\n if not isinstance(x_temp[1],str):\r\n shown_indexes = rangelist.range_find([int(Index(a_temp))\r\n for a_temp in x_temp[1]],reduce=True)\r\n else:\r\n shown_indexes = x_temp[1]\r\n\r\n if len(shown_indexes) < 20:\r\n return (abridge(x_temp[0]).replace(VERTLINE,SLASH)\r\n +VERTLINE\r\n +shown_indexes)\r\n\r\n\r\n returnlist = []\r\n sp_temp = rangelist.split_up_range(shown_indexes)\r\n\r\n\r\n returnlist.append(x_temp[0].replace(VERTLINE,SLASH)[0:min([60,len(x_temp[0])])]\r\n +VERTLINE+sp_temp[0])\r\n for s_temp in sp_temp[1:]:\r\n returnlist.append(VERTLINE+s_temp)\r\n\r\n return returnlist\r\n\r\n show_list(field_text_list,\r\n alerts.FIELDS[3:],0,40,\r\n func=fld_format,\r\n present=True,\r\n display=display)", "def __repr__(self):\n return '<List %r>' % (self.name)", "def printList(self): \r\n aux = self.head \r\n while(aux): \r\n print(aux.data , end = ' ') \r\n aux = aux.next", "def pr(self):\n return self.listformat.format(self.idx, str(self).rstrip())", "def print_list(self) -> None:\n cur_node = self.head\n while cur_node:\n print(cur_node.data)\n cur_node = cur_node.next", "def _print_field(h_lines: List[Line]):\n\n print(H_LINE)\n\n for line in h_lines:\n line.print()\n\n print(H_LINE)", "def _print_fields(print_list):\n max_field_name_length = 0\n for pair in print_list:\n if max_field_name_length < len(pair[0]):\n max_field_name_length = len(pair[0])\n\n for pair in print_list:\n print (\" %*s = %s\" % (max_field_name_length, pair[0], pair[1]))", "def print_list(self):\n p = self.head\n i = 0\n\n while i < self.size():\n print(p.data)\n i += 1\n p = p.next_node", "def print_bul_list(self, l):\n self.print_newline()\n for i in l:\n self._write(\" - %s\\n\" % i)\n self.print_newline()", "def display_list(list=[]):\n\n print(f\"Current List: {list}\")", "def displayField(self):\n print(\"Field :\")\n for i in range(len(self.field)):\n currentSuit = Suit(i + 1)\n print(Bcolor.BOLD + Suit.toColor(currentSuit) + \"\\t\" + str(currentSuit), self.field[i], end=\"\\t\" + Bcolor.END)\n print()", "def viewList(list):\n for i in list:\n print i", "def print_list(l):\n print('[' + ', '.join([x.__str__() for x in l]) + ']')", "def print_list(l, name=None, output=sys.stdout):\n if not name:\n name = '<no name given>'\n\n if not l or len(l) == 0:\n print('>>>>>>> list = ' + name + ' - EMPTY', file=output)\n return\n\n if name is None:\n name = str(l[0].__class__)\n\n print('>>>>>>> list = ' + name + ', len = ' + str(len(l)), file=output)\n for l_i in l:\n print(str(l_i), file=output)", "def __str__(self):\n \n return \"ID: %s, %s (%s)\" % (self.list_id, self.name, self.items)", "def print_list(self):\n node = self.head\n\n string = '['\n while node:\n if node.next:\n string += str(node.value) + ' -> '\n else:\n string += 
str(node.value)\n node = node.next\n string += ']'\n return string", "def printL(L, L_name='List', verbose=True):\n if verbose:\n ('\\n[' + L_name + ']:')\n if verbose:\n for item in list(L):\n print('\\t' + str(item))\n print('[' + L_name + '] length: ' + str(len(L)) + '\\n')", "def list(self, arg: SeField[Any]) -> str:\n if is_bare_list(arg.type):\n return arg.varname\n else:\n earg = arg[0]\n earg.name = \"v\"\n return f\"[{self.render(earg)} for v in {arg.varname}]\"", "def cli_print_record( field_list, showid=False):\n debug(\"cli_print_record(%s)\" % field_list)\n try:\n raw_record_list = api.find_records(field_list)\n except NoRecordsFound as error:\n print \"No records found for: %(field_list)s, %(error)s\" % locals() \n return False\n except InvaildQuery as error:\n print \"Not query to query database with\"\n return False\n\n # Grab all the display fields from the field_list\n display_field_list = api.get_display_fields(field_list)\n\n # Commented out, as i will assume if you have not asked for any fields,\n # then you want them all\n # Make sure that name is in the display_field_list\n #if 'name' not in display_field_list:\n # display_field_list.append('name')\n\n record_list = []\n record_length = defaultdict(int)\n for raw_record in raw_record_list:\n record = raw_record\n for k, v in raw_record.items():\n if isinstance(v, list):\n v = \",\".join(v)\n record[k] = v\n if record_length[k] < len(str(v)):\n record_length[k] = len(str(v))\n record_list.append(record)\n\n if display_field_list:\n\n simple_format = re.sub('(?P<m>\\w+)',\"%(\\g<m>)s\", \" \".join(display_field_list) )\n\n # Better formatting of the simple_format string\n display_string = \"\"\n for d in display_field_list:\n display_string += \"%%(%s)-%ds \" % (d, record_length[d])\n simple_format = display_string\n\n for record in record_list:\n try:\n print simple_format % record\n except KeyError as error:\n debug(\"cli_print_record: unable to print fields for record: %(error)s\" % locals())\n else:\n for record in record_list:\n print\n print '\\033[1m%(name)s\\033[0m' % record\n for key, value in sorted(record.items()):\n if type(value).__name__ in [ 'str', 'unicode','int','float','bool']:\n print \" %(key)s: %(value)s\" % locals()\n continue\n elif type(value).__name__ in [ 'list', 'set']:\n print \" %s: %s\" % ( key, \",\".join( value) )\n continue\n elif type(value).__name__ == 'ObjectId':\n if showid:\n print \" %(key)s: %(value)s\" % locals()\n continue\n elif type(value).__name__ == 'NoneType':\n continue\n\n else:\n raise RecordKeeperException(\"Unhandled data format '%s' <%s>\" % ( key, type(value).__name__))", "def print_list(list_, format_=None):\n\n format_ = format_ or DEFAULT\n\n if format_ == TEXT:\n for item in list_:\n print(item)\n elif format_ == JSON:\n print(json.dumps(list_))", "def display_list(the_list):\n print(\"\\n===================================\")\n for person in the_list:\n print(\"{name:12s}\\t\\t{phone}\".format(name=person.name, phone=person.phone))\n if the_list == []:\n print(\"\\nNo entries found!\\n\")\n print(\"===================================\\n\")", "def print_list(self, items):\n\t\tstrtype = unicode if self.encoding else bytes\n\t\titems = map(strtype, items)\n\t\twidth = self.get_width()\n\t\tlines = []\n\t\tsep = strtype(' ')\n\t\tfor item in items:\n\t\t\tif lines:\n\t\t\t\tnew = lines[-1] + sep + item\n\t\t\t\tif len(new) <= width:\n\t\t\t\t\tlines[-1] = new\n\t\t\t\t\tcontinue\n\t\t\tlines.append(item)\n\t\tself.write(strtype('\\n').join(lines))", "def 
get_fieldlist(cls):\n return cls.fieldlist", "def list(self):", "def list():", "def list():", "def __str__(self):\n \n return \"ID: %s, %s (%s)\" % (self.list_id, self.name, self.pieces)", "def _getListing(self):\n\n # lets assure consistent litsting order\n items = self._items.items()\n items.sort()\n return [ \"%s%s%s: %s\" % (_def_sep, str(x[1]), _def_sep, x[1].__doc__)\n for x in items ]", "def __str__(self):\n if self.item_info:\n item_type = self.item_info['type'](self.item_info)\n\n return gettext('List of %s') % item_type\n else:\n return gettext('List')", "def do_list(self, arg):\n print('The roster includes the following members:\\n')\n lines = formatTable(\n map(self.memberToList, self.roster),\n [\n ColumnFormat('id', 4),\n ColumnFormat('name', 30),\n ColumnFormat('introduced', 12)\n ]) \n for line in lines: \n print(line)", "def output(self):\n\n to_write = []\n\n\n try:\n test_field = self.fields[0]['id']\n except IndexError:\n test_field = None\n\n\n if test_field == None or test_field != 0:\n # missing fields\n return to_write\n\n\n\n to_write += ['#\\n# '+self.name+'\\n#\\n']\n to_write += ['DEF '+\n self.name+' '+\n self.ref+' '+\n '0 '+ # 0\n '1 '+ # off\n self.pin_numbers_visible + ' '+\n self.pin_names_visible + ' '+\n '1 '+\n 'F '+\n self.powerobject + '\\n'\n ]\n\n to_write += ['$FPLIST\\n']\n to_write += ['$ENDFPLIST\\n']\n\n for field in self.fields:\n line = 'F'\n for key in self._F_KEYS:\n line += str(field[key]) + ' '\n to_write += [line.rstrip() + '\\n']\n\n if self.alias != '':\n to_write += ['ALIAS '+self.alias+'\\n']\n\n to_write += ['DRAW\\n']\n\n for draw in self.draws:\n #print \"==================>\",draw.output()\n to_write += [draw.output()]\n\n for connection in self.connections:\n to_write += [connection.output()]\n\n to_write += ['ENDDRAW\\n']\n\n to_write += ['ENDDEF\\n']\n\n\n\n return to_write", "def print_num_list(self, l):\n self.print_newline()\n for num, item in enumerate(l):\n self._write(\" %i. %s\\n\" % (num + 1, item))\n num += 1\n self.print_newline()", "def fields(self):", "def __str__(self):\n return list.__str__(self)", "def print_list(arr: list, format: str = \"{}: {}\", l_type: ListTypes = ListTypes.NUMERIC_ORDERED) -> str:\n\n result = \"\"\n for i, e in enumerate(arr):\n result += get_list_entry_str(e, i, format, l_type) + '\\n'\n\n return result[:-1]", "def showListFromNode(self, node):\n if self.empty():\n return \"Linked List is Empty\"\n\n l = node\n while l is not None:\n print(l.data, end=\" ----> \")\n l = l.next\n print()\n return", "def print_list(l):\n for elem in l:\n print(elem)", "def _print_field(self):\n for y in range(self.game.field.size[1]):\n for x in range(self.game.field.size[0]):\n thing_type = None # Default: nothing on the coords.\n\n all_things_on_coords = game.what_is_on_coords((x, y))\n if all_things_on_coords:\n # If there are more things on one coordinates, only the\n # first one is printed. Symbol is determined by the class\n # of the thing.\n thing_type = type(all_things_on_coords[0])\n\n # Symbols are horizontally separated by a space for better\n # legibility.\n print(SYMBOLS[thing_type], end=' ') # . . . X . 
.\n nl()\n nl()", "def print_list(self):\r\n head = self\r\n tail = self.__next # go to my next node\r\n if tail is not None: # as long as the end of the list has not been reached\r\n print(head, end=\" \") # print my head\r\n tail.print_list() # recursively print remainder of the list\r\n else: # print the last element\r\n print(head, end=\" \")", "def _debug_node(self, lst, level, node):\n\n temp = \" \" * level\n temp += \"|\"\n temp += \"-\" * level\n\n if node.real:\n temp += \"%s[%s]\" % (node.key, node.value)\n else:\n temp += \"%s\" % (node.key)\n\n lst.append(temp)\n\n for child in node.children:\n self._debug_node(lst, level + 1, child)", "def print_list(list_to_parse: list):\n for item in list_to_parse:\n print(item)", "def test_list_field():", "def get_list_display(self, request):\n list_display = []\n for field_name in self.list_display:\n try:\n db_field = self.model._meta.get_field(field_name)\n if isinstance(db_field, BooleanField):\n field_name = boolean_switch_field(db_field)\n except FieldDoesNotExist:\n pass\n list_display.append(field_name)\n return list_display", "def list_repr(self):\n pretty = []\n pretty.append(self.name)\n for node in self.evaluation_sequence:\n pretty.append(node.list_repr())\n return '\\n '.join(pretty)", "def description(self) -> str:\n return f\"List of {self.key}\"", "def listFields(self):\n return self.get_json('/field')", "def print_data_list(self):\n print('\\n{0}'.format(self.webDataFrame))", "def fields(self):\n ...", "def printlist(self):\n current_element = self.head\n items = []\n while current_element:\n items.append(current_element.value)\n current_element = current_element.next\n return items", "def print_list(input_):\n for item in input_:\n print(item, end = \" \")\n print(\"\")", "def __str__(self):\n\n list_str = ''\n current = self.head\n while current:\n # print(current, \"current\")\n list_str += str(current.value ) + ', '\n current = current.next\n return list_str[:-2]", "def test_cell_list_fields_success(self, mock_printlist, mock_list):\n self.shell('cell-list -r 1 --fields id name')\n mock_list.assert_called_once_with()\n mock_printlist.assert_called_once_with(mock.ANY,\n list({'id': 'ID',\n 'name': 'Name'}))", "def list_viewer(listt):\n\tif len(listt) == 0:\n\t\tprint(\"There are no elements\")\n\t\tprint()\n\telse:\n\t\ti = 0\n\t\tfor dictionary in listt:\n\t\t\ti += 1\n\t\t\tprint(f\"Account #{i} »»\")\n\t\t\tprint(\n\t\t\t\t\"\\tService Name: \", dictionary[\"service\"], \"\\n\",\n\t\t\t\t\"\\tUser Name: \", dictionary[\"user\"], \"\\n\",\n\t\t\t\t\"\\tPassword: \", dictionary[\"password\"], \"\\n\",\n\t\t\t\t)", "def printList(head):\n print(deconstructList(head))", "def __str__(self):\n\n list_str = ''\n current = self.head\n while current:\n # print(current, \"current\")\n list_str += str(current.value ) + ', '\n current = current.next\n \n return list_str[:-2]", "def list_fields(fc):\n return [f.name for f in arcpy.ListFields(fc)]", "def list_fields(fixture_file=settings.FIXTURE_PATH, list_to_shell=True):\n fields = []\n with open(fixture_file, 'r') as posts:\n posts = json.load(posts, encoding='utf8')\n i = 0\n for post in posts:\n for field in post['fields']:\n fields.append(field)\n i += 1\n if list_to_shell:\n print yellow(\"All available BlogPost fields:\")\n print fields\n print yellow(\"%i fields total\" % i)\n return fields", "def display_contents(CurrentList):\n\n print(\"========================Start of display_contents() Method*\")\n print(\"The number of items in list are :\" + 
str(len(CurrentList)))\n print(\"----- Fl.ID--- ||sub_T|| reqStart||Dur ||Start||End\")\n # Flight ID||sub_Time||reqStart||reqDuration||actualStart||actualEnd\")\n for j in range(len(CurrentList)):\n print(str(j) + \": \" + CurrentList[j].showFlightInfo())\n print(\"========================END of display_contents() Method *\")", "def display_list(self, prefix=''):\n parent = '' if self.parent is None else self.parent.id\n children = [] if not self.children else [c.id for c in self.children]\n output = (\n f'{prefix}NODE ID: {self.id}\\n'\n f'{prefix} type: {self.node_type}\\n'\n f'{prefix} label: {self.label}\\tparent node: {parent}\\n'\n f'{prefix} arity: {self.arity}\\tchild node(s): {children}\\n\\n')\n if self.children:\n output += ''.join(child.display_list(prefix=prefix+'\\t')\n for child in self.children)\n return output", "def __repr__(self):\n return str(self.list_all())", "def basic_print(lista):\n for item in lista:\n print(\"{} \\t\\t {}\".format(item[0], item[1]))", "def printList(head) :\n \n # Iterate through the list, printing all values\n ptr = head\n while ptr :\n print(ptr.data, end=\" \")\n ptr = ptr.next\n print()", "def print_list(l, numbered = True, bullet_character = '-'):\n for index, element in enumerate(l):\n if numbered:\n print(\"{}: {}\".format(index+1, element))\n else:\n print(\"{} {}\".format(bullet_character, element))", "def __str__(self):\n return gettext('List of %s') % self.resource.__name__", "def repr_fields(self):\n pa = set_blank_if_default(self.pa, 0)\n pb = set_blank_if_default(self.pb, 0)\n\n w1a = set_blank_if_default(self.wa[0], 0.0)\n w2a = set_blank_if_default(self.wa[1], 0.0)\n w3a = set_blank_if_default(self.wa[2], 0.0)\n\n w1b = set_blank_if_default(self.wb[0], 0.0)\n w2b = set_blank_if_default(self.wb[1], 0.0)\n w3b = set_blank_if_default(self.wb[2], 0.0)\n x1, x2, x3 = self.get_x_g0_defaults()\n\n # offt doesn't exist in NX nastran\n offt = set_blank_if_default(self.offt, 'GGG')\n\n list_fields = ['CBAR', self.eid, self.Pid(), self.Ga(), self.Gb(), x1, x2,\n x3, offt, pa, pb, w1a, w2a, w3a, w1b, w2b, w3b]\n return list_fields", "def printEntry(self,i):\n for name in self.namelist:\n value = self.getValue(name,i)\n if value is None:\n return\n print(name,self.getValue(name,i))", "def build_list_field(self, field_name: str, field: dict):\n field_layout = [sg.Text(self.build_label_text(field_name, field), size=(15, 1)),\n sg.Listbox(field.get(\"options\"), default_values=field.get(\"default\"), size=(20, 4),\n enable_events=False, key=field_name)]\n\n return field_layout", "def print_list(things_to_print, prefix=\"\\t\", stream=sys.stdout):\n for item in things_to_print:\n print(f\"{prefix}{item}\", file=stream)", "def print(self):\n current = self.head.next\n for i in range(0,self.count):\n print(current.item)\n current = current.next", "def print_process_list(self) -> None:\n\n print(f\"Process List: {self.process_list}\")", "def __str__(self):\n list_string = \"\"\n for item in self._data:\n list_string += item + \", \"\n return list_string", "def simple_list_display(keyval: str, record: dict, title: str, no_val: bool = False):\n if keyval in record:\n if len(record[keyval]):\n if no_val:\n result = \", \".join(list(record[keyval]))\n else:\n result = \", \".join(m[\"value\"].title() for m in record[keyval])\n print(f\"{bold(title)}: {result}\\n\")", "def print_items(items): \n print(items)", "def display_list(d):\n print(\"\\nOur generous donors: \\n\")\n for donor_name in iter(d.donors):\n print(donor_name)\n print(\"\\n\")", "def 
_output_vadapter_list_verbose(outf, name, vadapter_list):\n\n if vadapter_list:\n FMT = \"%s\\n%-10s\\n%-20s\\n%-5s\\n%s\\n%s\\n%s\\n%s\\n%s\\n%s\\n%s\\n%s\\n%s\\n%s\\n%s\\n%s\\n%s\\n\"\n# HEADER = (\"id\",\"name\",\"io_module_id\",\"vfabric_id\",\"protocol\",\"init_type\",\"desc\",\"status\",\"mac\"\n# ,\"prmiscuous_mode\",\"silent_listener\",\"vlan\",\"wwnn\",\"wwpn\",\"fc_id\",\"spma\",\"fmpa\") \n\t\n for (n , vadapter) in vadapter_list:\n name = vadapter['NAME']\n id = vadapter['ID']\n protocol = vadapter['PROTOCOL']\n vfabric = vadapter['VFABRIC']\n io_module = vadapter['IO_MODULE_ID']\n desc = vadapter['DESC']\n status = vadapter['STATUS']\n init_type = vadapter['INIT_TYPE']\n running_mode = vadapter['RUNNING_MODE']\n if protocol == 1:\n mac = vadapter['MAC']\n promiscuous = vadapter['PROMISCUOUS']\n silent = vadapter['SILENT']\n vlan = vadapter['VLAN']\n elif protocol == 3:\n wwnn = vadapter['WWNN']\n wwpn = vadapter['WWPN']\n spma = vadapter['SPMA']\n fpma = vadapter['FPMA']\n fc_id = vadapter['FC_ID']\n\n\n if _LIMITED_SHOW:\n outf.write('General Attr:\\nId: %s\\n\\tName: %s\\n\\tIO_Module: %s\\n\\tVfabric: %s\\n\\tProtocol: %s\\n\\tInitization_Type: %s\\n\\tDescription: %s\\n\\tStatus: %-20s\\n\\n' % (id, name, io_module, vfabric, protocol, init_type, desc, status)) \n \n\t\telif _DETAIL_SHOW: \n if protocol == 3:\n outf.write('General Attr:\\nId: %s\\nName: %s\\nIO_Module: %s\\nVfabric: %s\\nProtocol: %s\\nInitization_Type: %s\\nDescription: %s\\nStatus: %-20s\\nRUNNING_MODE :%s\\nFC_Attr:\\n\\tWWNN: %s\\n\\tWWNP: %s\\n\\tFC_ID: %s\\n\\tSPMA: %s\\n\\tFMPA: %s\\n\\n' % (id, name, io_module, vfabric, protocol, init_type, desc, status, running_mode, wwnn, wwpn,fc_id,spma,fpma)) \n \n elif protocol == 1:\n\t\t\t outf.write('General Attr:\\nId: %s\\nName: %s\\nIO_Module: %s\\nVfabric: %s\\nProtocol: %s\\nInitization_Type: %s\\nDescription: %s\\nStatus: %-20s\\nRUNNING_MODE :%s\\nEN_Attr:\\n\\tMAC: %s\\n\\tVLAN: %s\\n\\tPromiscuous: %s\\n\\tSilent Listener: %s\\n\\n' % (id , name, io_module,vfabric, protocol, init_type, desc, status, running_mode, mac,vlan, promiscuous, silent))", "def print_list(words):\r\n for w in words:\r\n print w,\r\n print", "def _print_json_list(self, resource_wrappers):\n is_empty_list = True\n for i, resource_wrapper in enumerate(resource_wrappers):\n is_empty_list = False\n if i == 0:\n # Start of JSON list for long long listing.\n print('[')\n print(resource_wrapper, end='')\n else:\n # Print resource without newline at end to allow list formatting for\n # unknown number of items in generator.\n print(',\\n{}'.format(resource_wrapper), end='')\n\n # New line because we were removing it from previous prints to give us\n # the ability to do a trailing comma for JSON list printing.\n print()\n if not is_empty_list:\n # Close long long listing JSON list. 
Prints nothing if no items.\n print(']')", "def print_list(lst):\n i = 0\n while i < len(lst):\n print(lst[i])\n i += 1", "def write_listing(listing, dep, pom):\n print(f'| {dep.groupId} | {dep.artifactId} | {dep.version} ', end='', file=listing)\n print(f'| {pom.authors} ', end='', file=listing)\n print(f'| {pom.license.name} | {pom.license.url} ', end='', file=listing)\n print(f'|', file=listing)", "def list(\n self,\n name,\n ):\n pass", "def print_list(items):\n for element in items:\n print(element)", "def fields(self):\r\n pass", "def show(self):\n if self.is_empty():\n print('[]')\n return\n line = '['\n for item in self._data:\n line += '(' + str(item._key) + ', ' + str(item._value) + '), '\n line = line[:-2] + ']'\n print(line)", "def _render_list_to_string(self, alist):\n return \",\".join(self._render_row(alist))", "def display(self):\r\n\t\ts = self.options['space']\r\n\t\tv = self.level\r\n\t\tt = self.options['tab']\r\n\t\tp = self.options['sep']\r\n\t\tb = self.options['bullet']\r\n\t\tprint(v*t+b+s+self.text)\r\n\t\tfor each_item in self.items:\r\n\t\t\teach_item.display()", "def pprint(self):\n # just here for defining the interface; work is done in subclasses\n pass", "def print_list(data):\n for i, line in enumerate(data):\n print(\"Linha {}: {}\".format(i, line))", "def __str__(self):\n text = []\n for varname, attrname, valuetype, label, widget, group, options in self.entries:\n value = getattr(self, varname, \"\")\n text.append(\"%s: %s\" % (attrname, value))\n return \"\\n\".join(text)", "def print(self):\n temp = self.head\n while temp.next!=None:\n temp = temp.next\n \n print(temp.value, end= ' ')\n print(\"\")", "def print_csv(self, items, fields):\r\n writer = csv.writer(sys.stdout)\r\n writer.writerow(fields)\r\n for i in items:\r\n i_fields = [self.string(getattr(i, f)) for f in fields]\r\n writer.writerow(i_fields)", "def fieldhelp2(self, fieldid):\n txt = []\n dd_desc = M.Globals[\"^DD\"][self.fileid][fieldid][21]\n for k,v in dd_desc.keys_with_decendants():\n txt.append(dd_desc[k][0].value)\n return '\\n'.join(txt)", "def print(self):\n print(\"Repository list: \")\n for repo in self.list:\n print(\"- \" + repo.name)", "def printall():\n print listAll()", "def fields(self):\n return [f[1] for f in sorted(self.dd.fields.items())]" ]
[ "0.7778303", "0.72787195", "0.68855184", "0.6764633", "0.6687931", "0.6623039", "0.65351075", "0.65242046", "0.65014845", "0.64988315", "0.6490655", "0.6490049", "0.6484101", "0.6460331", "0.64499795", "0.6441054", "0.64375824", "0.64356184", "0.64225334", "0.63925815", "0.63714653", "0.63535994", "0.63505226", "0.6322236", "0.6306847", "0.6293236", "0.628878", "0.6238374", "0.62279314", "0.61892545", "0.6172169", "0.6172169", "0.61639464", "0.61500233", "0.61303437", "0.6114983", "0.6102275", "0.6102047", "0.60574925", "0.60339355", "0.60292274", "0.5983744", "0.59739316", "0.59738696", "0.5973402", "0.59672004", "0.5961906", "0.59502554", "0.5947324", "0.5943737", "0.59412146", "0.59392583", "0.593886", "0.5930515", "0.5928484", "0.5925337", "0.5921385", "0.5914458", "0.59103304", "0.5875536", "0.5874712", "0.58687985", "0.5866748", "0.5844231", "0.583693", "0.58294874", "0.5823713", "0.58234996", "0.5822362", "0.5817977", "0.581567", "0.5810555", "0.5805821", "0.58050144", "0.5800849", "0.5787462", "0.5781969", "0.57709074", "0.57661426", "0.5763655", "0.57581514", "0.5755863", "0.5750194", "0.5728858", "0.57224274", "0.57206815", "0.57089305", "0.5703585", "0.56972206", "0.56964767", "0.5690245", "0.5689903", "0.5684477", "0.5683017", "0.56782603", "0.56664026", "0.5661525", "0.5658881", "0.5650906", "0.5643106" ]
0.6982858
2
Returns backend to use.
def get_backend():
    from cryptography.hazmat.backends import default_backend
    return default_backend()
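For context, a minimal sketch of exercising this snippet, assuming a pre-3.x release of the cryptography package where primitives still accept an explicit backend= argument; the RSA parameters are illustrative:

# Generate an RSA key using the backend returned by the helper above.
from cryptography.hazmat.primitives.asymmetric import rsa

key = rsa.generate_private_key(public_exponent=65537, key_size=2048,
                               backend=get_backend())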
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_backend():\n return _BACKEND", "def get_backend(name):\n return _DEFAULT_PROVIDER.get_backend(name)", "def get_backend():\n return __SETTINGS__._BACKEND", "def _get_backend(args):\n if args.backend == 'gatttool':\n backend = GatttoolBackend\n elif args.backend == 'bluepy':\n backend = BluepyBackend\n elif args.backend == 'pygatt':\n backend = PygattBackend\n else:\n raise Exception('unknown backend: {}'.format(args.backend))\n return backend", "def get_default_backend():\n return __default_backend", "def get_backend():\n\n return sys.modules[__name__]", "def get_backend():\n\n return sys.modules[__name__]", "def backend(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"backend\")", "def get_backend():\n return sys.modules[__name__]", "def get_backend():\n return sys.modules[__name__]", "def getBackend(self):\n return self.header['BACKEND']", "def get_backend():\n return Connection()", "def get_storage_backend(self):\n return self.client.info()['Driver']", "def backend(self) -> str:\n return self.__class__.BACKEND_NAME", "def get_backend():\n global _ACTIVE_BACKEND\n if not _ACTIVE_BACKEND:\n _ACTIVE_BACKEND = locate(settings.SITE_BACKEND)()\n return _ACTIVE_BACKEND", "def backend(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"backend\")", "def backend(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"backend\")", "def backend(self):\n # This never changes (so no read locking needed).\n return self._backend", "def get_backend(self):\n return self.analyze_db_task(constants.TRAIN_DB).backend", "def backend_name(self) -> str:\n return self._db_data.backend", "def _backend(self) -> Backend:\n return self.__backend", "def get_backend_class(backend):\n # NOTE(sirp): avoiding circular import\n from glance.store.http import HTTPBackend\n from glance.store.s3 import S3Backend\n from glance.store.swift import SwiftBackend\n from glance.store.filesystem import FilesystemBackend\n\n BACKENDS = {\n \"file\": FilesystemBackend,\n \"http\": HTTPBackend,\n \"https\": HTTPBackend,\n \"swift\": SwiftBackend,\n \"s3\": S3Backend}\n\n try:\n return BACKENDS[backend]\n except KeyError:\n raise UnsupportedBackend(\"No backend found for '%s'\" % backend)", "def get_profile_backend(self, profile):\n return self._get_attribute(profile, 'backend')", "def _get_plot_backend(backend: str | None = None):\n backend_str: str = backend or get_option(\"plotting.backend\")\n\n if backend_str in _backends:\n return _backends[backend_str]\n\n module = _load_backend(backend_str)\n _backends[backend_str] = module\n return module", "def find_backend(cls) -> IBackend:\n cls.Lock.acquire()\n try:\n return cls._load_backend()\n finally:\n cls.Lock.release()", "def get_backend(\n self,\n backend_id: str,\n ) -> Optional[Type[BaseCertificateStorageBackend]]:\n return self.get('backend_id', backend_id)", "def get_tgis_backend():\n global tgis_backend\n return tgis_backend", "def get_backend():\n backend_path = settings.CALENDAR_BACKEND\n\n try:\n backend_modulename, backend_classname = backend_path.rsplit('.', 1)\n except ValueError:\n raise ImproperlyConfigured('{0} isn\\'t a backend module'.format(backend_path))\n\n # See if the module has already been imported.\n try:\n backend_module = sys.modules[backend_modulename]\n except KeyError:\n # ok, then import it.\n try:\n backend_module = import_module(backend_modulename)\n except ImportError as e:\n raise ImproperlyConfigured('Error importing backend {0}: \"{1}\"'.format(backend_modulename, e))\n\n try:\n backend_class = 
getattr(backend_module, backend_classname)\n except AttributeError:\n raise ImproperlyConfigured(\n 'Backend module \"{0}\" does not define a \"{1}\" class'.format(backend_modulename, backend_classname)\n )\n\n backend_instance = backend_class()\n\n if not isinstance(backend_instance, BaseBackend):\n raise ImproperlyConfigured(\n 'Backend class \"{0}\" is not a subclass of \"django_calendar.backends.BaseBackend\"'.format(backend_classname)\n )\n\n return backend_instance", "def detect_backend():\n try:\n from termpixels.unix import UnixBackend\n return UnixBackend()\n except:\n try:\n from termpixels.win32_vt import Win32VtBackend\n return Win32VtBackend()\n except Exception as e:\n raise e\n from termpixels.win32 import Win32Backend\n return Win32Backend()", "def backend_protocol(self) -> Optional[pulumi.Input[Union[str, 'BackendProtocol']]]:\n return pulumi.get(self, \"backend_protocol\")", "def get_backend(self, name):\n if name == DATABASE_TYPE_MYSQL:\n ret = 2\n elif name == DATABASE_TYPE_POSTGRESQL:\n ret = 3\n elif name == DATABASE_TYPE_SQLITE:\n ret = 4\n # sqlcoder: this assignment fixes unicode problems for me with sqlite (windows, cp1252)\n # feel free to remove or improve this if you understand the problems\n # better than me (not hard!)\n Charset.not_needed1, Charset.not_needed2, Charset.not_needed3 = True, True, True\n else:\n raise ValueError('Unsupported database backend: %s' % self.supported_databases[name].db_server)\n\n return ret", "def get_circ_backend(self, hub: str = 'ibm-q-afrl', default_backend=None) -> basebackend:\n\n if default_backend is None:\n default_backed = _preferred_backend\n\n default_backend = self.backend if self.backend is not None else default_backend\n return IBMQ.get_provider(hub=hub).get_backend(default_backend)", "def get_default():\n backend, opts = parse_default()\n assert backend is not None\n return load_backend(backend, opts)", "def get_backend() -> BiasCorrectionAlgoBackend:", "def get_backend(cls, backend=None):\n return backend if backend else aws.S3Backend(\n category=cls.default_category, bucket_name=cls.default_bucket)", "def name(self):\n return self._config.backend_name", "def get_authentication_backend(cls):\r\n return '%s.%s' % (cls.BACKEND_CLASS.__module__, cls.BACKEND_CLASS.__name__)", "def get_provider_from_backend(backend):\n known_providers = {\n 'BasicAerProvider': 'qiskit.BasicAer',\n 'AerProvider': 'qiskit.Aer',\n 'IBMQProvider': 'qiskit.IBMQ',\n }\n if isinstance(backend, BaseBackend):\n provider = backend.provider()\n if provider is None:\n raise ImportError(\"Backend object '{}' has no provider\".format(backend.name()))\n\n return known_providers.get(provider.__class__.__name__, provider.__class__.__qualname__)\n elif not isinstance(backend, str):\n raise ImportError(\"Invalid Backend '{}'\".format(backend))\n\n for provider in known_providers.values():\n try:\n if get_backend_from_provider(provider, backend) is not None:\n return provider\n except:\n pass\n\n raise ImportError(\"Backend '{}' not found in providers {}\".format(backend, list(known_providers.values())))", "def set_backend(self, backend):\n if backend not in AVAILABLE_BACKENDS:\n raise StorageError(f'Unrecognized backend {backend}; use one of {AVAILABLE_BACKENDS}')\n if backend == 'tinydb':\n LOGGER.debug(\"Using TinyDB database as requested for %s\", self.name)\n self._backend = DB_TINYDB\n elif backend == 'sqlite':\n LOGGER.debug(\"Using SQLite database as requested for %s\", self.name)\n self._backend = DB_SQLITE\n elif backend == 'auto':\n if 
self._sqlite_storage.database_exists():\n LOGGER.debug(\"Using SQLite database in AUTO mode because one already exists for %s\", self.name)\n self._backend = DB_SQLITE\n else:\n LOGGER.debug(\"Using TinyDB (default) in AUTO because no database already exists for %s\", self.name)\n self._backend = DB_TINYDB", "def get_backend(output, filename, is_library):\n return Common32Backend(MarsTemplates(), output, filename, is_library)", "def get_backend_from_coredata(builddir: Path) -> str:\n return coredata.load(str(builddir)).get_builtin_option('backend')", "def _get_storage_backend(fq_classname):\n LOG.debug('Running _get_storage_backend with fq_classname [%s]'\n % fq_classname)\n\n if not fq_classname:\n return None\n\n (modname, clname) = fq_classname.rsplit('.', 1)\n # A test import of the backend storage class should have been undertaken\n # at app startup in django_drf_filepond.apps.ready so any failure\n # importing the backend should have been picked up then.\n mod = importlib.import_module(modname)\n storage_backend = getattr(mod, clname)()\n LOG.info('Storage backend instance [%s] created...' % fq_classname)\n\n return storage_backend", "def get_service_from_backend(backend):\n return ExperimentData.get_service_from_provider(backend.provider)", "def get_backend_by_name(cls_str):\n # type: (str) -> Backend\n try:\n return globals()[cls_str]()\n except KeyError:\n raise InvalidBackendClass('Invalid backend class name: {cls}'.format(cls=cls_str))", "def _get_active_backend(\n prefer=default_parallel_config[\"prefer\"],\n require=default_parallel_config[\"require\"],\n verbose=default_parallel_config[\"verbose\"],\n):\n\n backend_config = getattr(_backend, \"config\", default_parallel_config)\n\n backend = _get_config_param(\n default_parallel_config['backend'], backend_config, \"backend\"\n )\n prefer = _get_config_param(prefer, backend_config, \"prefer\")\n require = _get_config_param(require, backend_config, \"require\")\n verbose = _get_config_param(verbose, backend_config, \"verbose\")\n\n if prefer not in VALID_BACKEND_HINTS:\n raise ValueError(\n f\"prefer={prefer} is not a valid backend hint, \"\n f\"expected one of {VALID_BACKEND_HINTS}\"\n )\n if require not in VALID_BACKEND_CONSTRAINTS:\n raise ValueError(\n f\"require={require} is not a valid backend constraint, \"\n f\"expected one of {VALID_BACKEND_CONSTRAINTS}\"\n )\n if prefer == 'processes' and require == 'sharedmem':\n raise ValueError(\n \"prefer == 'processes' and require == 'sharedmem'\"\n \" are inconsistent settings\"\n )\n\n explicit_backend = True\n if backend is None:\n\n # We are either outside of the scope of any parallel_(config/backend)\n # context manager or the context manager did not set a backend.\n # create the default backend instance now.\n backend = BACKENDS[DEFAULT_BACKEND](nesting_level=0)\n explicit_backend = False\n\n # Try to use the backend set by the user with the context manager.\n\n nesting_level = backend.nesting_level\n uses_threads = getattr(backend, 'uses_threads', False)\n supports_sharedmem = getattr(backend, 'supports_sharedmem', False)\n # Force to use thread-based backend if the provided backend does not\n # match the shared memory constraint or if the backend is not explicitely\n # given and threads are prefered.\n force_threads = (require == 'sharedmem' and not supports_sharedmem)\n force_threads |= (\n not explicit_backend and prefer == 'threads' and not uses_threads\n )\n if force_threads:\n # This backend does not match the shared memory constraint:\n # fallback to the default 
thead-based backend.\n sharedmem_backend = BACKENDS[DEFAULT_THREAD_BACKEND](\n nesting_level=nesting_level\n )\n # Warn the user if we forced the backend to thread-based, while the\n # user explicitely specified a non-thread-based backend.\n if verbose >= 10 and explicit_backend:\n print(\n f\"Using {sharedmem_backend.__class__.__name__} as \"\n f\"joblib backend instead of {backend.__class__.__name__} \"\n \"as the latter does not provide shared memory semantics.\"\n )\n # Force to n_jobs=1 by default\n thread_config = backend_config.copy()\n thread_config['n_jobs'] = 1\n return sharedmem_backend, thread_config\n\n return backend, backend_config", "def find_backend(path, backends):\n for backend in backends:\n if backend.path == path:\n return backend\n\n return None", "def find_backend(path, backends):\n for backend in backends:\n if backend.path == path:\n return backend\n\n return None", "def get_from_backend(uri, **kwargs):\n\n parsed_uri = urlparse.urlparse(uri)\n scheme = parsed_uri.scheme\n\n backend_class = get_backend_class(scheme)\n\n return backend_class.get(parsed_uri, **kwargs)", "def backend_plugin(self):\n return None", "def backend_pool_type(self) -> Optional[pulumi.Input[Union[str, 'BackendPoolType']]]:\n return pulumi.get(self, \"backend_pool_type\")", "def get_backend_setting(cls, name, default=None):\n backend_settings = get_backend_settings(cls.BACKEND)\n if default is not None:\n return backend_settings.get(name, default)\n else:\n try:\n return backend_settings[name]\n except KeyError:\n raise ImproperlyConfigured(\"getpaid '%s' requires backend '%s' setting\" % (cls.BACKEND, name))", "def storage_backend_get_by_name(context, name, inactive=False):\n return _find_storage_backend(context, dict(name = name), True, None, inactive=inactive)", "def remote_backend(self, remote_backend):\n try:\n return self._remote_backend_plugins[remote_backend]\n except KeyError:\n raise Error(\"undefined remote backend: \\\"%s\\\"; defined remote backends: %s\" % (remote_backend, util.quoted_list(self._remote_backend.keys())))", "def get_backends(self) -> dict:\n return Config.get_backends()", "def crypto_key_backend(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"crypto_key_backend\")", "def which_backend(self, backend_name, type_name, conf):\n print(\"backend_name is : <{}>\".format(backend_name))\n if backend_name not in self.records.keys():\n print(\"first get object\")\n self.port_obj = PortFactory.backends.get(backend_name)(type_name, conf)\n print(\"get object from factory : {}\".format(self.port_obj))\n self.records[backend_name] = [type_name]\n else:\n print(\"re-init get object\")\n self.port_obj.reinit(type_name,conf)\n self.records[backend_name].append(type_name)\n print(\"factory records: {}\".format(self.records))\n return self.port_obj", "def get_active_backend(\n prefer=default_parallel_config[\"prefer\"],\n require=default_parallel_config[\"require\"],\n verbose=default_parallel_config[\"verbose\"],\n):\n backend, config = _get_active_backend(prefer, require, verbose)\n n_jobs = _get_config_param(\n default_parallel_config['n_jobs'], config, \"n_jobs\"\n )\n return backend, n_jobs", "def backend_getInterface(self):\n\t\treturn describeInterface(self)", "def _switch_backend(self, model_db):\n if model_db['backend_name'] != self.backend_name:\n backend = switch_backend(model_db['backend_name'])\n self.backend_name = backend.__name__\n self.backend_version = None\n if self.backend_name == 'keras':\n from ..backend import keras_backend\n self.backend = 
keras_backend\n elif self.backend_name == 'sklearn':\n from ..backend import sklearn_backend\n self.backend = sklearn_backend\n if hasattr(backend, '__version__'):\n check = self.backend_version != backend.__version__\n self.backend_version = backend.__version__\n if check and self.verbose > 0: # pragma: no cover\n sys.stderr.write('Warning: the backend versions'\n 'do not match.\\n') # pragma: no cover", "def load_backend(name, options=None):\n if name is None:\n assert options is None\n return get_default()\n if options is None:\n options = {}\n if name not in _backends:\n raise UnknownBackend(name)\n try:\n res = _backends[name]()(**options)\n except Exception as e:\n raise LoadingError(name) from e\n return res", "def backend_info_get(context, host):\n result = _backend_info_query(context, host)\n return result", "def get_by_backend_name(cls, backend_name):\r\n cls._check_configured()\r\n for enabled in cls._ENABLED.values():\r\n if enabled.BACKEND_CLASS.name == backend_name:\r\n return enabled", "def load_backend(backend: str | Type[Backend]) -> Type[Backend]:\n if isinstance(backend, type) and issubclass(backend, Backend):\n return backend\n elif isinstance(backend, str):\n try:\n backend = BUILTIN_BACKENDS[backend]\n except KeyError:\n raise ValueError(f'No such backend \"{backend}\"')\n p, m = backend.rsplit('.', 1)\n mod = importlib.import_module(p)\n attr = getattr(mod, m)\n if isinstance(attr, type) and issubclass(attr, Backend):\n return attr\n else:\n raise TypeError('Backend must be subclass of Backend class.')\n else:\n raise ValueError('Expecting string or Backend subclass.')", "def _load_driver(backend, **kargs):\n bk_module = importlib.import_module('backend', __package__)\n driver_cls = getattr(bk_module, str.capitalize(backend) + 'Backend')\n return driver_cls(**kargs)", "def backend_echo(threescale, backend_usages):\n\n return threescale.backends.read(backend_usages[0][\"backend_id\"])", "def backend(self) -> Dict[str, Any]:\n # Terraform can only have one backend configured; this formats the\n # data to make it easier to work with\n return [\n {\"type\": k, \"config\": v}\n for k, v in self.terraform_block.get(\n \"backend\", {None: cast(Dict[str, str], {})}\n ).items()\n ][0]", "def get_redis_backend():\r\n # From django-redis-cache.\r\n _, server, params = parse_backend_uri(settings.REDIS_BACKEND)\r\n db = params.pop('db', 1)\r\n try:\r\n db = int(db)\r\n except (ValueError, TypeError):\r\n db = 1\r\n try:\r\n socket_timeout = float(params.pop('socket_timeout'))\r\n except (KeyError, ValueError):\r\n socket_timeout = None\r\n password = params.pop('password', None)\r\n if ':' in server:\r\n host, port = server.split(':')\r\n try:\r\n port = int(port)\r\n except (ValueError, TypeError):\r\n port = 6379\r\n else:\r\n host = 'localhost'\r\n port = 6379\r\n return redislib.Redis(host=host, port=port, db=db, password=password,\r\n socket_timeout=socket_timeout)", "def backends():\n return list(loader.backend_dict.keys())\n # return loader._preference", "def get_backend_settings(backend):\n backends_settings = getattr(settings, 'GETPAID_BACKENDS_SETTINGS', {})\n try:\n return backends_settings[backend]\n except KeyError:\n return {}", "def set_backend(self, backend):\n self.backend = backend", "def resolve_backend_name(self, name):\n resolved_name = \"\"\n available = [b.name() for b in self.available_backends(filters=None)]\n grouped = self.grouped_backend_names()\n deprecated = self.deprecated_backend_names()\n aliased = self.aliased_backend_names()\n\n if name in 
available:\n resolved_name = name\n elif name in grouped:\n available_members = [b for b in grouped[name] if b in available]\n if available_members:\n resolved_name = available_members[0]\n elif name in deprecated:\n resolved_name = deprecated[name]\n logger.warning('WARNING: %s is deprecated. Use %s.', name, resolved_name)\n elif name in aliased:\n resolved_name = aliased[name]\n\n if resolved_name not in available:\n raise LookupError('backend \"{}\" not found.'.format(name))\n\n return resolved_name", "def get_database_engine(scheme):\n scheme_to_engine = {\n 'postgresql': 'django.db.backends.postgresql',\n 'postgres': 'django.db.backends.postgresql',\n 'mysql': 'django.db.backends.mysql',\n 'sqlite': 'django.db.backends.sqlite3',\n 'oracle': 'django.db.backends.oracle',\n }\n\n try:\n return scheme_to_engine[scheme]\n except KeyError:\n raise ValueError(\"Unsupported database '{}'\".format(scheme))", "def load_backend(backend_name):\n try:\n module_bits = backend_name.split(\".\")\n klass = module_bits.pop()\n return getattr(import_module(\".\".join(module_bits)), klass)\n except ImportError as e_user:\n # The nlp backend wasn't found. Display a helpful error message\n # listing all built-in nlp backends.\n backend_dir = str(Path(__file__).parent / 'backends')\n available_backends = [\n name for _, name, ispkg in pkgutil.iter_modules([backend_dir])\n if ispkg and name not in {'base'}\n ]\n if backend_name not in [\n 'poetaster.nlp.backends.%s' % b for b in available_backends\n ]:\n backend_reprs = map(repr, sorted(available_backends))\n raise ImproperlyConfigured(\n \"%r isn't an available nlp backend.\\n\"\n \"Try using 'poetaster.nlp.backends.X', where X is one of:\\n\"\n \" %s\" % (backend_name, \", \".join(backend_reprs))\n ) from e_user\n else:\n # If there's some other error, this must be an error in Django\n raise", "def _get_backend_device(cls, device: UsbDevice) -> Any:\n try:\n #pylint: disable-msg=protected-access\n # need to access private member _ctx of PyUSB device\n # (resource manager) until PyUSB #302 is addressed\n return device._ctx.dev\n #pylint: disable-msg=protected-access\n except AttributeError:\n return None", "def backend(anonymous_session):\n return anonymous_session", "def get_backend_disk(self, disk):\n backend_vm = self.get_backend_vm(disk.vm)\n for device in backend_vm.config.hardware.device:\n if (\n isinstance(device, vim.VirtualDisk)\n and str(device.key) == disk.backend_id\n ):\n return device", "def backend_quotes(threescale, backend_usages):\n\n return threescale.backends.read(backend_usages[1][\"backend_id\"])", "def backend_object(self, id):\n return self.model.Suite.everything.get(id=id)", "def _get_backend_module(name):\n if name == \"numpy\":\n import numpy as np\n\n return np\n if name == \"numpy.ma\":\n import numpy as np\n\n return np.ma\n if name == \"torch\":\n import torch\n\n return torch\n if name == \"jax\":\n import jax\n import jax.numpy as jnp\n\n _JAX_KEY = jax.random.PRNGKey(0)\n return jnp\n if name == \"tensorflow\":\n import tensorflow as tf\n\n return tf", "def set_backend(*backend):\n global _BACKEND\n if not backend:\n raise ValueError('Need at least one backend.')\n _BACKEND = backend", "def get_backend_vm(self, vm):\n return self._get_backend_vm(vm.backend_id)", "def get_recordings_backend(config):\n backend = config['RECORDINGS_BACKEND']\n if backend in RECORDINGS:\n return RECORDINGS[backend].ListFiles(config)", "def GetBackendService(self, args):\n objects = list(request_helper.MakeRequests(\n requests=[(self.service,\n 
'Get',\n messages.ComputeBackendServicesGetRequest(\n project=self.context['project'],\n backendService=args.name\n ))],\n http=self.context['http'],\n batch_url=self.context['batch-url']))\n return objects[0]", "def select_backend(embedding_model) -> BaseEmbedder:\n # keybert language backend\n if isinstance(embedding_model, BaseEmbedder):\n return embedding_model\n\n # Flair word embeddings\n if \"flair\" in str(type(embedding_model)):\n from keybert.backend._flair import FlairBackend\n\n return FlairBackend(embedding_model)\n\n # Spacy embeddings\n if \"spacy\" in str(type(embedding_model)):\n from keybert.backend._spacy import SpacyBackend\n\n return SpacyBackend(embedding_model)\n\n # Gensim embeddings\n if \"gensim\" in str(type(embedding_model)):\n from keybert.backend._gensim import GensimBackend\n\n return GensimBackend(embedding_model)\n\n # USE embeddings\n if \"tensorflow\" and \"saved_model\" in str(type(embedding_model)):\n from keybert.backend._use import USEBackend\n\n return USEBackend(embedding_model)\n\n # Sentence Transformer embeddings\n if \"sentence_transformers\" in str(type(embedding_model)):\n return SentenceTransformerBackend(embedding_model)\n\n # Create a Sentence Transformer model based on a string\n if isinstance(embedding_model, str):\n return SentenceTransformerBackend(embedding_model)\n\n # Hugging Face embeddings\n if isinstance(embedding_model, Pipeline):\n return HFTransformerBackend(embedding_model)\n\n return SentenceTransformerBackend(\"paraphrase-multilingual-MiniLM-L12-v2\")", "def __init__(self, backend: Optional[str] = None, /, **kwargs: Any) -> None:\n if not backend:\n try:\n backend = self.__default_backend__\n except AttributeError:\n raise ValueError(\n \"You must specify which backend to use as first positional argument.\"\n )\n backend_factory: Type[T] = load_plugin(backend, self.__group__)\n self._backend: T = backend_factory(**kwargs)", "def _load_backend(backend: str) -> types.ModuleType:\n from importlib.metadata import entry_points\n\n if backend == \"matplotlib\":\n # Because matplotlib is an optional dependency and first-party backend,\n # we need to attempt an import here to raise an ImportError if needed.\n try:\n module = importlib.import_module(\"pandas.plotting._matplotlib\")\n except ImportError:\n raise ImportError(\n \"matplotlib is required for plotting when the \"\n 'default backend \"matplotlib\" is selected.'\n ) from None\n return module\n\n found_backend = False\n\n eps = entry_points()\n key = \"pandas_plotting_backends\"\n # entry_points lost dict API ~ PY 3.10\n # https://github.com/python/importlib_metadata/issues/298\n if hasattr(eps, \"select\"):\n entry = eps.select(group=key) # pyright: ignore[reportGeneralTypeIssues]\n else:\n # Argument 2 to \"get\" of \"dict\" has incompatible type \"Tuple[]\";\n # expected \"EntryPoints\" [arg-type]\n entry = eps.get(key, ()) # type: ignore[arg-type]\n for entry_point in entry:\n found_backend = entry_point.name == backend\n if found_backend:\n module = entry_point.load()\n break\n\n if not found_backend:\n # Fall back to unregistered, module name approach.\n try:\n module = importlib.import_module(backend)\n found_backend = True\n except ImportError:\n # We re-raise later on.\n pass\n\n if found_backend:\n if hasattr(module, \"plot\"):\n # Validate that the interface is implemented when the option is set,\n # rather than at plot time.\n return module\n\n raise ValueError(\n f\"Could not find plotting backend '{backend}'. 
Ensure that you've \"\n f\"installed the package providing the '{backend}' entrypoint, or that \"\n \"the package has a top-level `.plot` method.\"\n )", "def is_simulator_backend(backend):\n return backend.configuration().simulator", "def get_connection(backend=None, fail_silently=False, **kwargs):\n\n path = backend or settings.EMAIL_BACKEND\n try:\n mod_name, klass_name = path.rsplit('.', 1)\n mod = import_module(mod_name)\n except ImportError, e:\n raise ImproperlyConfigured(\n 'Error importing email backend module %s: \"%s\"' % (mod_name, e))\n try:\n klass = getattr(mod, klass_name)\n except AttributeError:\n raise ImproperlyConfigured(\n 'Module \"%s\" does not define a \"%s\" class' % (\n mod_name, klass_name))\n\n return klass(fail_silently=fail_silently, **kwargs)", "def get_issue(issue_number):\n backend_name = os.environ[\"ISSUE_BACKEND\"]\n backend_module = importlib.import_module(\n \"issuebranch.backends.{}\".format(backend_name)\n )\n\n return getattr(backend_module, \"Backend\")(issue_number)", "def storage_backend_get_by_id(context, id, inactive=False):\n return _find_storage_backend(context, dict(id = id), True, None, inactive=inactive)", "def get_instance():\n \"\"\"Add more judgement for selecting more database backend\"\"\"\n return IMPL", "def _instantiate_backend_from_name(name, options):\r\n # Parse backend name\r\n\r\n try:\r\n parts = name.split('.')\r\n module_name = '.'.join(parts[:-1])\r\n class_name = parts[-1]\r\n except IndexError:\r\n raise ValueError('Invalid event track backend %s' % name)\r\n\r\n # Get and verify the backend class\r\n\r\n try:\r\n module = import_module(module_name)\r\n cls = getattr(module, class_name)\r\n if not inspect.isclass(cls) or not issubclass(cls, BaseBackend):\r\n raise TypeError\r\n except (ValueError, AttributeError, TypeError, ImportError):\r\n raise ValueError('Cannot find event track backend %s' % name)\r\n\r\n backend = cls(**options)\r\n\r\n return backend", "def set_backend(name):\n # perform checks\n if name == 'autograd' and not AG_AVAILABLE:\n raise ValueError(\"Autograd backend is not available, autograd must \\\n be installed.\")\n\n # change backend by monkeypatching\n if name == 'numpy':\n backend.__class__ = NumpyBackend\n elif name == 'autograd':\n backend.__class__ = AutogradBackend\n else:\n raise ValueError(f\"unknown backend '{name}'\")", "def device(self):\n return get_device(self.module_to_quantize)", "def crypto_key_backend(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"crypto_key_backend\")", "def list_backends(_):\n backends = [b.__name__ for b in available_backends()]\n print('\\n'.join(backends))", "def is_local_backend(backend):\n return backend.configuration().local", "def get_compatible_pyplot(backend=None, debug=True):\n import matplotlib\n\n # If the backend provided is None, just default to\n # what's already being used.\n existing_backend = matplotlib.get_backend()\n if backend is not None:\n # Can this raise?...\n matplotlib.use(backend)\n\n # Print out the new backend\n if debug:\n sys.stderr.write(\"Currently using '%s' MPL backend, \"\n \"switching to '%s' backend%s\"\n % (existing_backend, backend, os.linesep))\n\n # If backend is not set via env variable, but debug is\n elif debug:\n sys.stderr.write(\"Using '%s' MPL backend%s\"\n % (existing_backend, os.linesep))\n\n from matplotlib import pyplot as plt\n return plt", "def load_backend(self, presentation=None):\r\n initialized, filename_for_frontend = self.media.load_backend(presentation)\r\n if initialized:\r\n return True, 
filename_for_frontend\r\n else:\r\n return False # Error something failed while loading the backend\r", "def get_driver(drv):\n return GenericDriver.get_driver(drv)" ]
[ "0.8743596", "0.8575249", "0.83286643", "0.8244357", "0.82377666", "0.8199863", "0.8199863", "0.816488", "0.8118611", "0.8118611", "0.80922145", "0.80846", "0.7978867", "0.7977651", "0.7964498", "0.7956947", "0.7936695", "0.7910934", "0.7906332", "0.7804737", "0.7777473", "0.77774185", "0.7754407", "0.76903075", "0.76815134", "0.76378816", "0.7618919", "0.7602589", "0.75347936", "0.74521685", "0.74215055", "0.73252255", "0.72910064", "0.7199422", "0.71571076", "0.70174015", "0.69946283", "0.6973999", "0.6951049", "0.69020325", "0.6879322", "0.681927", "0.67745495", "0.67423236", "0.6715579", "0.67153543", "0.67153543", "0.66563207", "0.6637806", "0.66362715", "0.6615872", "0.6605487", "0.65938276", "0.6583756", "0.654809", "0.65330136", "0.645577", "0.64474726", "0.6445189", "0.6430914", "0.6406725", "0.6398827", "0.6382863", "0.638096", "0.6380147", "0.6336109", "0.63016635", "0.62674516", "0.6260476", "0.6231604", "0.62315977", "0.6231482", "0.6225387", "0.62183255", "0.61869675", "0.6174117", "0.61699325", "0.61542416", "0.6148116", "0.61079895", "0.610509", "0.6081842", "0.60643953", "0.60605353", "0.59801227", "0.59741426", "0.5970324", "0.5922981", "0.5893029", "0.58858097", "0.584278", "0.5830185", "0.582337", "0.5821511", "0.5817623", "0.5812937", "0.5811627", "0.58079123", "0.5799588", "0.5795164" ]
0.7869946
19
Default arguments for KeyUsage.
def make_key_usage(digital_signature=False, content_commitment=False,
                   key_encipherment=False, data_encipherment=False,
                   key_agreement=False, key_cert_sign=False, crl_sign=False,
                   encipher_only=False, decipher_only=False):
    return x509.KeyUsage(digital_signature=digital_signature,
                         content_commitment=content_commitment,
                         key_encipherment=key_encipherment,
                         data_encipherment=data_encipherment,
                         key_agreement=key_agreement,
                         key_cert_sign=key_cert_sign,
                         crl_sign=crl_sign,
                         encipher_only=encipher_only,
                         decipher_only=decipher_only)
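A short usage sketch, assuming a typical TLS server profile; the specific flag choices below are illustrative:

# Enable the bits commonly set for a TLS server certificate; every other
# flag keeps its False default from the signature above.
usage = make_key_usage(digital_signature=True, key_encipherment=True)
# The resulting x509.KeyUsage value can then be attached to a certificate
# builder via builder.add_extension(usage, critical=True).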
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_extra_args(self):\n super(AwsAccessListKeysMethod, self).add_extra_args()\n self.parser.add_argument(\"--key_pair_name\", required=False, default=None,\n help=\"AWS Key Pair name\")", "def add_key_arguments(parser):\n group = parser.add_argument_group(\"Key management\")\n group.add_argument(\n \"-ks\", \"--key_size\",\n help='Length of the new key',\n type=int,\n default=4096\n )\n group.add_argument(\n \"-kt\", \"--key_type\",\n help=\"Method used for generating the new key\",\n choices=[\"dsa\", \"rsa\"],\n default=\"rsa\"\n )\n return group", "def help_args():\n pass", "def _default(self):\n self.app.args.print_help()", "def _usage_options_example(self):\n pass", "def __init__(self, help_key=None):\n self.help_key = help_key", "def add_usage(self, usage, actions, groups, prefix=''):\n #if prefix is None:\n # prefix = ''\n return super(SubcommandHelpFormatter, self).add_usage(usage, actions, groups, prefix='')", "def defaults_opts(self, command):\n cmd_help = getdoc(getattr(TopLevelCommand, command))\n return docopt(cmd_help, [])", "def default(self, key):\n raise KeyError(repr(key))", "def help_default_values():\n click.echo_via_pager(docgen.generate_default_value_help())", "def _key(self):\n key_args = [self.__class__.__name__] + [str(a) for a in self.args]\n return (\":\".join(key_args))", "def defaultargs(options):\n config = {}\n for longname, default, _ in options:\n config[longname] = default\n return config", "def setdefault(self, key):\n pass", "def setKey(self, *args):\n return _libsbml.ConversionOption_setKey(self, *args)", "def test_validate_default_kw_only_follows_non_default_kw_only(self):\n fsig = FSignature(\n [forge.kwarg('a', default=None), forge.kwarg('b')],\n __validate_parameters__=False,\n )\n fsig.validate()", "def cmd_args(self):\n return [\"0\", self.name]", "def extra_target_arguments(self):\n return {}", "def def_key(x):\n return x", "def help(self, keyword):\n if (keyword == 'all'):\n string = ('%-20s%-20s%-20s%s\\n' % ('Keyword', 'Type', 'Default', 'Comment'))\n for key, value in self.allowed_keys.items():\n string += ('%-20s%-20s%-20s%s\\n' % (key, str(value[0]), str(value[1]), value[2]))\n print string", "def key_usages(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"key_usages\")", "def key_usages(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"key_usages\")", "def setup_default_arguments(self):\n self.add_argument('--clean', action='store_true',\n help='Cleans all generated files.')", "def key():\n pass", "def key():\n pass", "def do_key(self, cmd):\n self.params.set('key', cmd, 'global')", "def key(annotation: Any, *, default: Any = _Empty, param_kind: Any = _Empty):\n return annotation, default, param_kind", "def test_kw_args_with_defaults():\n assert arguments.fun_opt_kw_params() == ('blue', 'red', 'yellow', 'orange')", "def test_defaultValues(self):\n argV = []\n self.usage.parseOptions(argV)\n self.failUnlessEqual(self.usage.opts['fooint'], 392)\n self.assert_(isinstance(self.usage.opts['fooint'], int))\n self.failUnlessEqual(self.usage.opts['foofloat'], 4.23)\n self.assert_(isinstance(self.usage.opts['foofloat'], float))\n self.failUnlessEqual(self.usage.opts['eggint'], None)\n self.failUnlessEqual(self.usage.opts['eggfloat'], None)", "def _set_default_args(self):\n self._parser.add_argument(\"username\")\n self._parser.add_argument(\"password\")\n self._parser.add_argument(\n \"--start\",\n help=\"Start date for the scraper in iso format, eg: 
2017-11-19\",\n type=str,\n default=None,\n )\n self._parser.add_argument(\n \"--end\",\n help=\"End date for the scraper in iso format\",\n type=str,\n default=None,\n )\n self._parser.add_argument(\n \"--skip-delete\",\n help=\"Delete the scraper folder in /tmp after run\",\n action=\"store_true\",\n )", "def createkey(*args): # {{{2\n return '-'.join(map(simplifyname, args))", "def __init__(__self__, *,\n default_mode: Optional[pulumi.Input[int]] = None,\n items: Optional[pulumi.Input[Sequence[pulumi.Input['KeyToPathArgs']]]] = None,\n optional: Optional[pulumi.Input[bool]] = None,\n secret_name: Optional[pulumi.Input[str]] = None):\n if default_mode is not None:\n pulumi.set(__self__, \"default_mode\", default_mode)\n if items is not None:\n pulumi.set(__self__, \"items\", items)\n if optional is not None:\n pulumi.set(__self__, \"optional\", optional)\n if secret_name is not None:\n pulumi.set(__self__, \"secret_name\", secret_name)", "def get_default_args(**kw):\n default_args_exp = {\n \"output_file\": \"ml_demo.c\",\n \"function_name\": \"ml_demo\",\n \"precision\": ML_Binary32,\n \"accuracy\": ML_Faithful,\n \"target\": GenericProcessor.get_target_instance()\n }\n default_args_exp.update(kw)\n return DefaultArgTemplate(**default_args_exp)", "def base_arguments(self):\n raise NotImplementedError()", "def get_default_args(**kw):\n default_args_log = {\n \"output_file\": \"POLY.c\",\n \"function_name\": \"POLY\",\n \"precision\": ML_Binary64,\n \"target\": GenericProcessor.get_target_instance(),\n \"function\": None,\n \"interval\": None,\n \"epsilon\": None\n }\n default_args_log.update(kw)\n return DefaultArgTemplate(**default_args_log)", "def usage():\n pass", "def default_argv():\n\n return ['--quick', # so no config file is loaded\n # Other defaults to minimize side effects on stdout\n '--colors=NoColor', '--no-term-title','--no-banner',\n '--autocall=0']", "def __init__(self, *args: Union[List[AtomKey], EKT], **kwargs: str) -> None:\n ...", "def ext_key_usages(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"ext_key_usages\")", "def ext_key_usages(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"ext_key_usages\")", "def add_usage(self, usage, actions, groups, prefix=None):\n if prefix is None:\n prefix = colored('Usage: \\n ', 'cyan')\n return super(ColoredHelpFormatter, self).add_usage(\n usage, actions, groups, prefix)", "def __init__(self, key, default=NOT_GIVEN):\n self.key = adapt(key,IComponentKey)\n self.default = default", "def usage():", "def usage():", "def __init__(self, *args, **kwargs):\n assert not kwargs.get('key_name'), (\n 'Setting key_name manually not supported')\n submission_key = kwargs.get('submission_key')\n assert submission_key, 'Missing required submission_key property'\n kwargs['key_name'] = self.key_name(submission_key)\n super(ReviewSummary, self).__init__(*args, **kwargs)", "def setdefault(self, k, d=None): # real signature unknown; restored from __doc__\n pass", "def key_handler(args):\n key = create_key(args.key_type, args.key_size, args.key_out)\n\n if not args.key_out:\n print(print_key(key))\n\n return key", "def __init__(self, *args: Union[List[AtomKey], HKT], **kwargs: str) -> None:\n ...", "def optionHelp(self):\n return {}", "def usage():\n print(\"[1] Getting help from a cipher \")\n print(\" ---> ./cryptogra.py caesar -h \")\n print(\"\")", "def __init__(__self__, *,\n default_mode: Optional[pulumi.Input[int]] = None,\n items: 
Optional[pulumi.Input[Sequence[pulumi.Input['KeyToPathArgs']]]] = None,\n name: Optional[pulumi.Input[str]] = None,\n optional: Optional[pulumi.Input[bool]] = None):\n if default_mode is not None:\n pulumi.set(__self__, \"default_mode\", default_mode)\n if items is not None:\n pulumi.set(__self__, \"items\", items)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if optional is not None:\n pulumi.set(__self__, \"optional\", optional)", "def __init__(self, *args: Union[List[AtomKey], DKT], **kwargs: str) -> None:\n ...", "def test_exactly_implicit_default_no_args_required():\n class TestCmdLine(CmdLine):\n yaml_def = '''\n supported_options:\n - category:\n options:\n - name : test_opt\n long : test-opt\n opt : param\n default : default-value\n required : true\n '''\n test_opt = None\n args = \"util-name\"\n parse_result = TestCmdLine.parse(args)\n assert parse_result.value == ParseResultEnum.MISSING_MANDATORY_ARG.value", "def help_option(args, run):\n pass", "def __loadDefaultArgs(self):\n mod = self.__class__.__name__.lower()\n logger.debug(\"Loading default args for module: %s\" % mod)\n\n default_args = conf.get(\"modules.%s\" % mod, None)\n if not default_args:\n return\n for k, v in default_args.iteritems():\n logger.debug(\"Setting default %s arg: %s=%s\" % (mod, k, v))\n self.setArg(k, v)", "def help_usage(self):\n\t\thelptext = \"\"\"\nUSAGE\n==========\n1.) connect to server:\n\tWhen starting p22p, you dont automatically connect to a server.\n\tTo do this, use the 'connect'-command.\n\tWithout additional arguements, p22p will connect to {default}.\n\tIf you want to connect to a other server, use the following syntax:\n\t\tconnect PROTO://SERVER:PORT\n\twhere PROTO is either 'ws' or 'wss'. 'wss' is a SSL/TLS connection, ws a insecure connection.\n\tNote that the communication between to clients is always CBC-encrypted (additionaly to other encryption methods.)\n\tThe CBC-password will never be sent to the server.\n\tThe Server only receives a hash of the password.\n\n2.) join or create a Group\n\tp22p is using Group as Network-Namespaces.\n\tEach Groupmember has a unique CID. However, the CID is only unique in the Group and only unique during that clients connection.\n\tTo create a new Group, use the 'create'-command:\n\t\tcreate NAME PASSWORD [KEYFILE]\n\tThe server only receives a hash of the PASSWORD.\n\tNote that groupnames starting with a \"#\" are reserved (You cant create them except if you have the key).\n\tIf you want to create a reserved group, pass the path to the keyfile.\n\tWhen creating a Group, you will automatically join that Group.\n\t\n\tTo join a Group, use the 'join'-command:\n\t\tjoin NAME PSWD\n\tThe Server only reveives a hash of the Password.\n\n3.) relay a Port\n\tTo relay a port from your Device to a target device, use the 'relay'-command:\n\t\trelay PEER [LOCAL] REMOTE\n\tIf LOCAL is 0 or ommited, a free port is choosen.\n\tThis Command will create a socket listening to Port LOCAL on your DEVICE.\n\tOnce a connection is made to that Port, P22P will send a message to PEER, telling him to create a connection to Port REMOTE.\n\tAll data sent trough this connection will be encrypted with the Group's Password.\n\tThe Server only knows the hash of the password, meaning only Groupmembers know how to decrypt the Message.\n\tThe Server knows who should receive this message and sends it to only that Client.\n\n4.) 
Leaving a Group\n\tOnce you are finished, you can leave the Group.\n\tThis will close all connections to peers and free your CID.\n\tAll Groupmembers will receive a message that you left the Group.\n\tto leave a Group, use thr 'leave'-command.\n\n5.) Disconnecting\n\tIf you want to disconnect from the Server, use the 'disconnect'-command.\n\tThis will close all connections and also auto-leaves the Group (see 4.)\n\n6.) Exiting\n\tTo close this script, use the 'exit'-command.\n\tIf required, the 'disconnect'-command is invoked.\n\n7.) Additional commands\n\tTo get a list of all aviable commands, use the 'help'-command.\n\tTo get a description about a command, use the gollowing syntax:\n\t\thelp COMMAND\n\tHere are some useful commands:\n\t\tping PEER: pings a peer (not the Server.)\n\t\tlist: shows a list of all connections and relayed ports. also shows some information.\n\t\tcid: shows your current CID.\n\"\"\".format(default=DEFAULT_SERVER)\n\t\tself.stdout.write(helptext)", "def init_args(self):\n return {\n \"doc\": self.__doc__.format(name=colored(self.module_name, \"green\", attrs=['bold','underline'])),\n \"Url\": \"set a target url\",\n 'Type': \"set type to check , [php, asp, aspx, cgi, dir , mdb]\",\n }", "def key_callback(option,opt_str,value,parser):\n if \"--epi-key\" in opt_str:\n parser.values.epi_keys.append(value)\n elif \"--exclude\" in opt_str:\n parser.values.exclude_paths.append(value)", "def test_with_explicit_sample_args(self):\n test_dict = CliArgs('sample', ['-a', '26', '-s', 'somefile', '-n', '-u', 'foo', '-v']).__dict__\n self.assertEqual('foo', test_dict['user'])\n self.assertEqual(1, test_dict['verbosity'])\n self.assertEqual('26', test_dict['analyzer_profile'])", "def __init__(self, key=None):\n self.key = key", "def ext_key_usages(self) -> pulumi.Output[Optional[Sequence[str]]]:\n return pulumi.get(self, \"ext_key_usages\")", "def get_default_config_help(self):\n config = super(SignalfxHandler, self).get_default_config_help()\n\n config.update({\n 'url': 'Where to send metrics',\n 'batch': 'How many to store before sending',\n 'auth_token': 'Org API token to use when sending metrics',\n })\n\n return config", "def usage():\n print(\"This program runs with command line arguments.\\n\"\n \"Available parameters:\\n\"\n \"\\t-h --help : help\\n\"\n \"\\t-f : file name or path\\n\"\n \"\\t-k : key file\\n\"\n \"\\t-o : operaion\\n\"\n \"\\n\"\n \"There are 3 operations available:\\n\"\n \"\\t'1' --> add_sign() : adds hash to end of file\\n\"\n \"\\t'2' --> check() : checks if added hash and current hash are matched\\n\"\n \"\\t'3' --> remove_sign() : remove hash from end of file which has added with operion 1\\n\"\n \"\\n\"\n \"Example command: $python3 checker.py -f message.pdf -k key_file.txt -o 1\")", "def usage():\n print \"\\n\\n%s, Version %s, Encrypt/decrypt script. \" %(ME,VERSION)\n print \"Encrypts or decrypts the input file using an eight-bit key \"\n print \" \"\n print \"USAGE: %s [OPTIONS] \" %ME\n print \" \"\n print \"OPTIONS: \"\n print \" -h --help Display this message. \"\n print \" -v --verbose Runs the program in verbose mode, default: %s. \" %VERBOSE\n print \" -d --debug Runs the program in debug mode (implies verbose). \"\n print \" -k --key= Key to be used for encrypt/decrypt, default: %s. \" %KEY \n print \" Valid keys are > 0 and < 255. \"\n print \" -i --input= Input file name, REQUIRED. \" \n print \" -o --output= Output file name, default is \\\"%s\\\". 
\" %output_file_name\n print \" \"\n print \"EXIT CODES: \"\n print \" 0 - Successful completion of the program. \"\n print \" 1 - Bad or missing command line arguments. \"\n print \" 2 - Invalid key, key must be an integer. \"\n print \" 3 - Invalid key, key must be an integer between 0 and 255. \"\n print \" 4 - Input file not supplied, use -i or --input options. \"\n print \" 5 - Unable tocreate the output file, check file/folder permissions. \"\n print \" 6 - Main Program cannot be imported by another script. \"\n print \" \" \n print \"EXAMPLES: \" \n print \" TODO - I'll make some examples up later. \"\n print \" \"\n pass", "def cmd_help(args):", "def add_default_params(self, params):\n params['key'] = self.key\n params['format'] = self.format\n #params['unique_id'] = generate_unique_id()\n return params", "def test_at_most_default_no_args_required():\n class TestCmdLine(CmdLine):\n yaml_def = '''\n supported_options:\n - category:\n options:\n - name : test_opt\n long : test-opt\n opt : param\n default : [default1, default2]\n multi_type: at-most\n count : 3\n required : true\n '''\n test_opt = None\n args = \"util-name\"\n parse_result = TestCmdLine.parse(args)\n assert parse_result.value == ParseResultEnum.MISSING_MANDATORY_ARG.value", "def key():", "def test_exactly_explicit_default_no_args_required():\n class TestCmdLine(CmdLine):\n yaml_def = '''\n supported_options:\n - category:\n options:\n - name : test_opt\n long : test-opt\n opt : param\n default : default-value\n multi_type: exactly\n count : 1\n required : true\n '''\n test_opt = None\n args = \"util-name\"\n parse_result = TestCmdLine.parse(args)\n assert parse_result.value == ParseResultEnum.MISSING_MANDATORY_ARG.value", "def configure_args(self):\n super(InstaApriori, self).configure_args()\n self.add_passthru_arg('-iteration', type=int, help=\"The current iteration. 
Not used as a command line argument\")\n self.add_passthru_arg('--k', type=int, default=3, help=\"Specify the maximum size of itemsets to find\")\n self.add_passthru_arg('--s', type=float, help=\"Specify the minimum support threshold\")\n self.add_passthru_arg('--c', type=float, default=0, help=\"Specify the minimum confidence threshold\")\n self.add_file_arg('--f', default='frequent.txt',\n help=\"Specify the name of the file used to store frequent itemsets\")", "def initDefaultCommand(self):\n pass", "def __init__(\n self, subject_as_keyword: bool, arg_map: Dict[str, str], arg_strict: bool\n ):\n self.subject_as_keyword = subject_as_keyword\n self.arg_map = arg_map\n self.arg_strict = arg_strict", "def setDefaultOpts(self):\n\t\tself.getopt.s = ['h']\n\t\tself.getopt.l = ['help']\n\t\tself.getopt.s.extend([('x:', 'screen')])\n\t\tself.getopt.l.extend([('xml=', 'screen')])\n\t\treturn", "def get_helper_c_code_args(self):\r\n return Subtensor.default_helper_c_code_args()", "def test_unset_key(self):\n context = {'help_key': 'unused-key'}\n self.assertRaises(\n ImproperlyConfigured,\n tags.madcap_flare_help,\n context)", "def default_hparams():\n params = {\n \"labels_index_map_store_path\": \"/tmp/shabda/\"\n }\n return params", "def __init__(__self__, *,\n key_data: pulumi.Input[str]):\n pulumi.set(__self__, \"key_data\", key_data)", "def usage():\n print(\"Usage: \" + sys.argv[0] + \" [-t title] [-l location] \\\n [-e email] [-p password]\\n\\r\\\n -e --email=EMAIL set your email for Google account\\n\\r\\\n -p --password=PASSWORD set password for your Google account\\n\\r\\\n -t --title=TITLE set title of event\\n\\r\\\n -l --location=LOCATION set location of event\\n\\r\\\n -c --calendar=CALENDAR set calendar name \\\n (default if not specifyed)\\n\\r\\\n -h --help display this help and exit\\n\\r\\\n -v --version display script version and exit\")", "def autodefaults (self):\r\n\r\n self.defaults_from_notes(identifying_key=EMPTYCHAR,\r\n mark=EQUAL,\r\n obj=self.default_dict['commands'],\r\n entrytext=COMMANDMACROSCRIPT)", "def test_exactly_implicit_default_args_required():\n class TestCmdLine(CmdLine):\n yaml_def = '''\n supported_options:\n - category:\n options:\n - name : test_opt\n long : test-opt\n opt : param\n default : default-value\n required : true\n '''\n test_opt = None\n args = \"util-name --test-opt=cmdline\"\n parse_result = TestCmdLine.parse(args)\n assert parse_result.value == ParseResultEnum.SUCCESS.value\n assert TestCmdLine.test_opt == \"cmdline\"", "def argument(arg, default):\n return \"{0}={1}\".format(arg, default) if default else arg", "def test_at_most_default_args_required():\n class TestCmdLine(CmdLine):\n yaml_def = '''\n supported_options:\n - category:\n options:\n - name : test_opt\n long : test-opt\n opt : param\n default : [default1, default2]\n multi_type: at-most\n count : 3\n required : true\n '''\n test_opt = None\n args = \"util-name --test-opt cmdline1 cmdline2\"\n parse_result = TestCmdLine.parse(args)\n assert parse_result.value == ParseResultEnum.SUCCESS.value\n assert TestCmdLine.test_opt == [\"cmdline1\", \"cmdline2\"]", "def test_at_most_default_no_count_no_args_optional():\n class TestCmdLine(CmdLine):\n yaml_def = '''\n supported_options:\n - category:\n options:\n - name : test_opt\n long : test-opt\n opt : param\n default : default1\n multi_type: at-most\n required : false\n '''\n test_opt = None\n args = \"util-name\"\n try:\n TestCmdLine.parse(args)\n except CmdLineException as e:\n assert \"Invalid defaults supplied\" in 
e.args[0]", "def _GetKeyString(self):", "def _GetKeyString(self):", "def _invocation_params(self) -> Dict[str, Any]:\n return self._default_params", "def help():\n \n pass", "def default_options(cls) -> Dict:\n return {}", "def test_exactly_implicit_default_no_args_optional():\n class TestCmdLine(CmdLine):\n yaml_def = '''\n supported_options:\n - category:\n options:\n - name : test_opt\n long : test-opt\n opt : param\n default : default-value\n required : false\n '''\n test_opt = None\n args = \"util-name\"\n parse_result = TestCmdLine.parse(args)\n assert parse_result.value == ParseResultEnum.SUCCESS.value\n assert TestCmdLine.test_opt == \"default-value\"", "def help(cls, extra_args=None):\n if (_is_text_interface()):\n return _create_text_help_str(cls, cls._TEXT_USAGE)\n else:\n return cls._GRAPHICAL_USAGE", "def key(key):\n return key", "def test_explicit_usage_message(self):\n assert 'Usage:' in main('coloredlogs', '--help', capture=True)", "def test_no_limit_default_no_count_no_args_required():\n class TestCmdLine(CmdLine):\n yaml_def = '''\n supported_options:\n - category:\n options:\n - name : test_opt\n long : test-opt\n opt : param\n default : [default1, default2]\n multi_type: no-limit\n required : true\n '''\n test_opt = None\n args = \"util-name\"\n parse_result = TestCmdLine.parse(args)\n assert parse_result.value == ParseResultEnum.MISSING_MANDATORY_ARG.value", "def precmd_hook_not_enough_parameters(self) -> plugin.PrecommandData:\n pass", "def valid_args(self):\r\n for k in request.args.keys():\r\n if k not in ['api_key']:\r\n getattr(self.__class__, k)", "def get_default_arg():\n\n arg = 'cog:C_cog_space_GRP world:parts_GRP trueWorld:noXform_GRP '\n return arg", "def get_argument_as_keywords(self):\n status = True\n arg_kv = self.get_values_for_mandatory_args()\n if len(arg_kv) != len(self.req_args_list):\n msg = 'could not execute %s without mandatory arguments' % (object)\n self.data_repository = skip_and_report_status(self.data_repository, msg)\n status = False\n arg_kv = self.get_values_for_optional_args(arg_kv)\n return arg_kv, status", "def help(self):\r\n self._short_help(None, None, None, None)", "def test_create_keyword_only(self, extra_in, extra_out):\n kwargs = dict(\n interface_name='a',\n name='b',\n type=int,\n converter=dummy_converter,\n validator=dummy_validator,\n metadata={'meta': 'data'},\n )\n fparam = FParameter.create_positional_or_keyword(**kwargs, **extra_in)\n assert isinstance(fparam, FParameter)\n assert immutable.asdict(fparam) == \\\n {**FPARAM_POK_DEFAULTS, **kwargs, **extra_out}", "def __init__(__self__, *,\n key: Optional[pulumi.Input[str]] = None,\n mode: Optional[pulumi.Input[int]] = None,\n path: Optional[pulumi.Input[str]] = None):\n if key is not None:\n pulumi.set(__self__, \"key\", key)\n if mode is not None:\n pulumi.set(__self__, \"mode\", mode)\n if path is not None:\n pulumi.set(__self__, \"path\", path)", "def test_exactly_explicit_default_args_required():\n class TestCmdLine(CmdLine):\n yaml_def = '''\n supported_options:\n - category:\n options:\n - name : test_opt\n long : test-opt\n opt : param\n default : default-value\n multi_type: exactly\n count : 1\n required : true\n '''\n test_opt = None\n args = \"util-name --test-opt=cmdline\"\n parse_result = TestCmdLine.parse(args)\n assert parse_result.value == ParseResultEnum.SUCCESS.value\n assert TestCmdLine.test_opt == \"cmdline\"" ]
[ "0.63416225", "0.63310456", "0.61922413", "0.6166601", "0.60149294", "0.5977559", "0.5891303", "0.5785784", "0.57817113", "0.5779327", "0.5699099", "0.5675902", "0.56591386", "0.56465507", "0.56432736", "0.5643039", "0.5611888", "0.55910945", "0.5587594", "0.558592", "0.558592", "0.55828464", "0.55803376", "0.55803376", "0.5542747", "0.5538183", "0.55234766", "0.55218667", "0.55196834", "0.5499853", "0.5469559", "0.5464646", "0.5461128", "0.54437774", "0.54395956", "0.5435281", "0.54305255", "0.54188156", "0.54188156", "0.541795", "0.54171747", "0.5412705", "0.5412705", "0.54008305", "0.5394352", "0.5386549", "0.5385985", "0.5384711", "0.53632444", "0.5355857", "0.5349876", "0.5342345", "0.5337557", "0.53195715", "0.5312188", "0.5304152", "0.5302555", "0.52927303", "0.52911854", "0.52890086", "0.52888775", "0.5281255", "0.5273465", "0.5273044", "0.5270196", "0.526779", "0.5265909", "0.5259938", "0.5257058", "0.52501994", "0.52456", "0.52369946", "0.5235957", "0.52355456", "0.5234942", "0.52303773", "0.5226151", "0.5221783", "0.5208249", "0.5207659", "0.520186", "0.51991427", "0.5193134", "0.5193134", "0.51926965", "0.5179271", "0.5178714", "0.51735896", "0.51705647", "0.5169999", "0.5168477", "0.516673", "0.516611", "0.5162313", "0.51622945", "0.51593155", "0.51554036", "0.5147944", "0.5144762", "0.5137133" ]
0.516289
93
Main CSR creation code.
def create_x509_req(privkey, subject_info):
    builder = x509.CertificateSigningRequestBuilder()
    builder = builder.subject_name(subject_info.get_name())
    builder = subject_info.install_extensions(builder)

    # create final request
    req = builder.sign(private_key=privkey, algorithm=SHA256(),
                       backend=get_backend())
    return req
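A minimal driving sketch: SubjectInfo is not defined in this record, so the stand-in below is hypothetical and only supplies the two methods the snippet calls; the EC curve choice is likewise an assumption.

from cryptography import x509
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.hashes import SHA256  # used by create_x509_req above
from cryptography.x509.oid import NameOID

class SubjectInfo:
    # Hypothetical stand-in: provides only what create_x509_req needs.
    def get_name(self):
        return x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u"example.com")])

    def install_extensions(self, builder):
        # Mark the request as an end-entity (non-CA) certificate request.
        return builder.add_extension(
            x509.BasicConstraints(ca=False, path_length=None), critical=True)

key = ec.generate_private_key(ec.SECP256R1(), backend=get_backend())
req = create_x509_req(key, SubjectInfo())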
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _generate_csr_and_key():\n key = rsa.generate_private_key(\n public_exponent=65537,\n key_size=2048,\n backend=default_backend())\n\n csr = x509.CertificateSigningRequestBuilder().subject_name(x509.Name([\n x509.NameAttribute(NameOID.COMMON_NAME, u\"Magnum User\"),\n ])).sign(key, hashes.SHA256(), default_backend())\n\n result = {\n 'csr': csr.public_bytes(\n encoding=serialization.Encoding.PEM).decode(\"utf-8\"),\n 'key': key.private_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PrivateFormat.TraditionalOpenSSL,\n encryption_algorithm=serialization.NoEncryption()).decode(\"utf-8\"),\n }\n\n return result", "def create_csr(dn):\n tmp_file = f'/tmp/{get_temp_filename()}'\n key_filename = f'{tmp_file}.key'\n csr_filename = f'{tmp_file}.csr'\n cmd = [\n \"openssl\",\n \"req\",\n \"-subj\", f'{dn}',\n \"-newkey\", f'rsa:{rsa_keysize}',\n \"-keyout\", f'{key_filename}',\n \"-out\", f'{csr_filename}',\n \"-nodes\"\n ]\n exec_cmd(cmd)\n return read_keypair(key_filename, csr_filename)", "def init_csr(privkey, names, cert_dir):\n csr_pem, csr_der = crypto_util.make_csr(privkey.pem, names)\n\n # Save CSR\n le_util.make_or_verify_dir(cert_dir, 0o755)\n csr_f, csr_filename = le_util.unique_file(\n os.path.join(cert_dir, \"csr-letsencrypt.pem\"), 0o644)\n csr_f.write(csr_pem)\n csr_f.close()\n\n logging.info(\"Creating CSR: %s\", csr_filename)\n\n return le_util.CSR(csr_filename, csr_der, \"der\")", "def create_pki():\n os.mkdir(pki_dir)\n os.mkdir(f'{pki_dir}/newcerts')\n Path(f'{pki_dir}/index.txt').touch()\n with open(f'{pki_dir}/serial', 'w') as serial_file:\n serial_file.write('00000000')\n serial_file.close()\n create_CA('/CN=My cool CA/O=Honest Achmed/OU=Used Cars/C=EU')", "def create_ssl_cert_request ( ssl_hostnames ) :\n first_hostname = ssl_hostnames[ 0 ]\n csr_filename = get_ssl_csr_filename( first_hostname )\n key_filename = get_ssl_key_filename( first_hostname )\n openssl_cnf = \"\"\"\n[req]\ndistinguished_name = req_distinguished_name\nreq_extensions = san_ext\n\n[req_distinguished_name]\ncountryName_default = US\nstateOrProvinceName_default = New York\nlocalityName_default = New York\norganizationalUnitName_default = Home Box Office, Inc\ncommonName_default = \"\"\" + first_hostname + \"\"\"\n\n[san_ext]\nbasicConstraints = CA:FALSE\nkeyUsage = nonRepudiation, digitalSignature, keyEncipherment\nsubjectAltName = @sans\n\n[sans]\n\"\"\"\n counter = 0\n for hostname in ssl_hostnames :\n counter += 1\n openssl_cnf += 'DNS.' 
+ str( counter ) + ' = ' + hostname + '\\n'\n\n with open( first_hostname, 'w' ) as f :\n f.write( openssl_cnf )\n cmd = 'openssl req -new -newkey rsa:2048 -nodes -out ' + csr_filename + ' -keyout ' + key_filename\n cmd += ' -config ' + first_hostname + ' -subj \"/C=US/ST=New York/L=New York/O=Home Box Office Inc/CN=' + first_hostname + '\"'\n keygen = subprocess.call( cmd, shell = True )\n os.remove( first_hostname )\n if keygen != 0 :\n print \"Generation of SSL request failed!\"\n return None\n\n return { 'csr-filename' : csr_filename, 'key-filename' : key_filename }", "def generateNewCSR(self, fqdn, subject=None, san=None, with_new_key=False, KeyUsage=True, ExtendedKeyUsage=True):\n if with_new_key:\n self.generateNewKey()\n\n self.logger.info(\"Creating CSR for '\" + str(fqdn) + \"' with SubjectAlternativeName's: \" + str(san))\n\n csr_subject = []\n if fqdn:\n csr_subject.append(x509.NameAttribute(x509.OID_COMMON_NAME, str(fqdn)))\n if subject is not None:\n if subject.organization is not None:\n csr_subject.append(x509.NameAttribute(x509.OID_ORGANIZATION_NAME, str(subject.organization)))\n if subject.organizational_unit is not None:\n csr_subject.append(x509.NameAttribute(x509.OID_ORGANIZATIONAL_UNIT_NAME, str(subject.organizational_unit)))\n if subject.country is not None:\n csr_subject.append(x509.NameAttribute(x509.OID_COUNTRY_NAME, str(subject.country.upper())))\n if subject.state is not None:\n csr_subject.append(x509.NameAttribute(x509.OID_STATE_OR_PROVINCE_NAME, str(subject.state) ))\n if subject.locality is not None:\n csr_subject.append(x509.NameAttribute(x509.OID_LOCALITY_NAME, str(subject.locality)))\n if subject.email is not None:\n csr_subject.append(x509.NameAttribute(x509.OID_EMAIL_ADDRESS, str(subject.email)))\n\n # Generate a CSR\n csr = x509.CertificateSigningRequestBuilder()\n csr = csr.subject_name(x509.Name(csr_subject))\n csr = csr.add_extension(\n x509.BasicConstraints(ca=False, path_length=None), critical=True,\n )\n # Adding SubjectAlternativeName\n adding_san = []\n if san is not None:\n for s in san:\n adding_san.append(x509.DNSName(s))\n csr = csr.add_extension(\n x509.SubjectAlternativeName(adding_san),\n critical=False,\n )\n\n # Key Usage: digitalSignature, keyEncipherment (critical)\n if KeyUsage:\n csr = csr.add_extension(x509.KeyUsage(True, False, True, False, False, False, False, False, False),\n critical=True)\n # Extended Key Usage: TLS Web Server Authentication, TLS Web Client Authentication\n if ExtendedKeyUsage:\n csr = csr.add_extension(\n x509.ExtendedKeyUsage([x509.ExtendedKeyUsageOID.SERVER_AUTH,x509.ExtendedKeyUsageOID.CLIENT_AUTH]),\n critical=False,\n )\n\n # Sign the CSR with our private key.\n self.csr = csr.sign(self.key, hashes.SHA256(), default_backend())", "def create_csr_pss(dn):\n tmp_file = f'/tmp/{get_temp_filename()}'\n key_filename = f'{tmp_file}.key'\n csr_filename = f'{tmp_file}.csr'\n\n cmd_genpkey = [\n \"openssl\",\n \"genpkey\",\n \"-algorithm\", \"rsa-pss\",\n \"-pkeyopt\", f'rsa_keygen_bits:{rsa_keysize}',\n \"-pkeyopt\", \"rsa_keygen_pubexp:65537\",\n \"-out\", f'{key_filename}'\n ]\n cmd_req = [\n \"openssl\",\n \"req\",\n \"-new\",\n \"-subj\", f'{dn}',\n \"-key\", f'{key_filename}',\n \"-out\", f'{csr_filename}'\n ]\n for cmd in [cmd_genpkey, cmd_req]:\n exec_cmd(cmd)\n\n return read_keypair(key_filename, csr_filename)", "def generate_csr(key, domain_name):\n csr = x509.CertificateSigningRequestBuilder().subject_name(x509.Name([\n # Provide various details about who we are.\n 
x509.NameAttribute(NameOID.COUNTRY_NAME, u\"US\"),\n x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, u\"MA\"),\n x509.NameAttribute(NameOID.LOCALITY_NAME, u\"Boston\"),\n x509.NameAttribute(NameOID.ORGANIZATION_NAME, u\"Northeastern\"),\n x509.NameAttribute(NameOID.COMMON_NAME, domain_name),\n ])).add_extension(\n x509.SubjectAlternativeName([\n x509.DNSName(domain_name),\n ])\n ,\n critical=True,\n\n # Sign the CSR with our private key.\n ).sign(key, hashes.SHA256(), default_backend())\n\n\n # Write our CSR out to disk.\n with open(domain_name + \".csr\", \"wb\") as f:\n f.write(csr.public_bytes(serialization.Encoding.PEM))\n\n return csr", "def _init_keys(self):\n\n basic_constraints = crypto.X509Extension('basicConstraints'.encode('ascii'), True,\n 'CA:TRUE, pathlen:0'.encode('ascii'))\n serial = self._get_serial()\n pkey = self._create_pkey(self.commonname, serial)\n self._create_cert(pkey, self.commonname, serial, [basic_constraints], expire=30*365)", "def generate_csr(self, key, cn, san=None):\n csr = x509.CertificateSigningRequestBuilder().subject_name(\n self.generate_x509_name(cn)\n )\n if san:\n dns_names = self.encode_san_dns_names(san)\n csr = csr.add_extension(\n x509.SubjectAlternativeName(dns_names),\n critical=False,\n )\n return csr.sign(key, hashes.SHA256(), default_backend())", "def CreateCrtFile(keyfile, csrfile):\n crtfile = tempfile.mkstemp()[1]\n cmd = [\n 'openssl',\n 'x509',\n '-req',\n '-days', '1',\n '-in', csrfile,\n '-signkey', keyfile,\n '-out', crtfile\n ]\n _RunCommand(cmd)\n return crtfile", "def CreateCsrFile(keyfile):\n csrfile = tempfile.mkstemp()[1]\n cmd = [\n 'openssl',\n 'req',\n '-new',\n '-key', keyfile,\n '-out', csrfile,\n '-subj', '/C=NA/ST=NA/L=NA/O=Chromium/OU=Test/CN=chromium.org'\n ]\n _RunCommand(cmd)\n return csrfile", "def _create_certificate_chain():\n caext = X509Extension(b\"basicConstraints\", False, b\"CA:true\")\n not_after_date = datetime.date.today() + datetime.timedelta(days=365)\n not_after = not_after_date.strftime(\"%Y%m%d%H%M%SZ\").encode(\"ascii\")\n\n # Step 1\n cakey = PKey()\n cakey.generate_key(TYPE_RSA, 2048)\n cacert = X509()\n cacert.set_version(2)\n cacert.get_subject().commonName = \"Authority Certificate\"\n cacert.set_issuer(cacert.get_subject())\n cacert.set_pubkey(cakey)\n cacert.set_notBefore(b\"20000101000000Z\")\n cacert.set_notAfter(not_after)\n cacert.add_extensions([caext])\n cacert.set_serial_number(0)\n cacert.sign(cakey, \"sha256\")\n\n # Step 2\n ikey = PKey()\n ikey.generate_key(TYPE_RSA, 2048)\n icert = X509()\n icert.set_version(2)\n icert.get_subject().commonName = \"Intermediate Certificate\"\n icert.set_issuer(cacert.get_subject())\n icert.set_pubkey(ikey)\n icert.set_notBefore(b\"20000101000000Z\")\n icert.set_notAfter(not_after)\n icert.add_extensions([caext])\n icert.set_serial_number(0)\n icert.sign(cakey, \"sha256\")\n\n # Step 3\n skey = PKey()\n skey.generate_key(TYPE_RSA, 2048)\n scert = X509()\n scert.set_version(2)\n scert.get_subject().commonName = \"Server Certificate\"\n scert.set_issuer(icert.get_subject())\n scert.set_pubkey(skey)\n scert.set_notBefore(b\"20000101000000Z\")\n scert.set_notAfter(not_after)\n scert.add_extensions(\n [X509Extension(b\"basicConstraints\", True, b\"CA:false\")]\n )\n scert.set_serial_number(0)\n scert.sign(ikey, \"sha256\")\n\n return [(cakey, cacert), (ikey, icert), (skey, scert)]", "def sign_certificate(csr):\n unique_filename = str(uuid.uuid4().hex)\n\n file = open(\"./csr_req/%s.csr\" % unique_filename, \"w\")\n 
file.write(csr.decode(\"utf-8\"))\n file.close()\n\n subprocess.run([\"../ca/scripts/sign.sh\", unique_filename], check=False)\n\n file = open(\"./csr_req/%s.p7b\" % unique_filename, \"r\")\n cert = file.read()\n\n os.remove(\"./csr_req/%s.csr\" % unique_filename)\n os.remove(\"./csr_req/%s.p7b\" % unique_filename)\n\n return cert", "def add_csr(session, client_ip, location, csr_name,\n signed=False, signed_date=False,\n username='system_user'):\n session = validate_session(session)\n try:\n add_csr = CsrInfo(csr_name, client_ip, location, signed, signed_date)\n session.add(add_csr)\n session.commit()\n return add_csr\n except Exception as e:\n session.rollback()", "def _new_runtime_credentials(self, force=False, readonly=False):\n _log.debug(\"new_runtime_credentials\")\n #Create keys and certificate request\n private_key = os.path.join(self.runtime_dir, \"private\", \"private.key\")\n private = os.path.dirname(private_key)\n _log.debug(\"new_runtime: %s\" % self.runtime_dir)\n out = os.path.join(self.runtime_dir, \"{}.csr\".format(self.node_name))\n _log.debug(\"out dir: %s\"% out)\n # Create ECC-based certificate\n log = subprocess.Popen([\"openssl\", \"ecparam\", \"-genkey\",\n \"-name\", \"prime256v1\",\n \"-out\", private_key],\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n stdout, stderr = log.communicate()\n if log.returncode != 0:\n raise IOError(stderr)\n\n log = subprocess.Popen([\"openssl\", \"req\", \"-new\",\n \"-config\",self.configfile,\n # \"-subj\", subject,\n \"-key\", private_key,\n \"-nodes\",\n \"-utf8\",\n \"-out\", out],\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n stdout, stderr = log.communicate()\n if log.returncode != 0:\n raise IOError(stderr)\n\n return out", "def create_key ():", "def sign_csr(csr, ca_private_key, ca_cert=None, issuer_name=None,\n ca_private_key_password=None, generate_ca=False):\n backend = cryptography.hazmat.backends.default_backend()\n # Create x509 artifacts\n root_ca_pkey = serialization.load_pem_private_key(\n ca_private_key.encode(),\n password=ca_private_key_password,\n backend=backend)\n\n new_csr = cryptography.x509.load_pem_x509_csr(\n csr.encode(),\n backend)\n\n if ca_cert:\n root_ca_cert = cryptography.x509.load_pem_x509_certificate(\n ca_cert.encode(),\n backend)\n issuer_name = root_ca_cert.subject\n else:\n issuer_name = issuer_name\n # Create builder\n builder = cryptography.x509.CertificateBuilder()\n builder = builder.serial_number(\n cryptography.x509.random_serial_number())\n builder = builder.issuer_name(issuer_name)\n builder = builder.not_valid_before(\n datetime.datetime.today() - datetime.timedelta(1, 0, 0),\n )\n builder = builder.not_valid_after(\n datetime.datetime.today() + datetime.timedelta(80, 0, 0),\n )\n builder = builder.subject_name(new_csr.subject)\n builder = builder.public_key(new_csr.public_key())\n\n builder = builder.add_extension(\n cryptography.x509.BasicConstraints(ca=generate_ca, path_length=None),\n critical=True\n )\n\n # Sign the csr\n signer_ca_cert = builder.sign(\n private_key=root_ca_pkey,\n algorithm=hashes.SHA256(),\n backend=backend)\n\n return signer_ca_cert.public_bytes(encoding=serialization.Encoding.PEM)", "def send_ssl_csr ( csr_filenames ) :\n if len( csr_filenames ) < 1 :\n print \"WARNING: No CSRs found to send to IT SSL Admins. 
Ignoring request.\"\n return 0\n\n msg = email.mime.multipart.MIMEMultipart( )\n msg[ 'subject' ] = 'SSL Cert Request'\n msg[ 'From' ] = ssl_from_address\n msg[ 'To' ] = ', '.join( ssl_to_address )\n if len( csr_filenames ) > 1 :\n txt = email.mime.text.MIMEText( 'Hi all,\\n\\nWe would like to request SSL certificates for the following hostnames. Attached are the CSRs. Please let us know if you have any questions.\\n\\nThanks,\\nChris' )\n else :\n txt = email.mime.text.MIMEText( 'Hi all,\\n\\nWe would like to request an SSL certificate for the following hostname: ' + csr_filenames[ 0 ] + '. Attached is the CSR. Please let us know if you have any questions.\\n\\nThanks,\\nChris' )\n msg.attach( txt )\n\n for csr_filename in csr_filenames :\n with open( csr_filename, 'r' ) as fp :\n csr = email.mime.text.MIMEText( fp.read( ) )\n csr.add_header( 'Content-Disposition', 'attachment', filename = csr_filename )\n msg.attach( csr )\n mailer = smtplib.SMTP( smtp_server )\n mailer.sendmail( ssl_from_address, ssl_to_address, msg.as_string( ) )\n mailer.quit( )", "def create_CA(dn):\n cmd_genrsa = [\"openssl\",\n \"genrsa\",\n \"-aes256\",\n \"-out\", f'{pki_dir}/ca.key',\n \"-passout\", f'pass:{ca_password}',\n f'{rsa_keysize}']\n cmd_req = [\"openssl\",\n \"req\",\n \"-new\",\n \"-x509\",\n \"-days\", \"999999\",\n \"-sha256\",\n \"-key\", f'{pki_dir}/ca.key',\n \"-out\", server_key_files[\"ca\"],\n \"-subj\", f'{dn}',\n \"-passin\", f'pass:{ca_password}']\n cmds = [cmd_genrsa, cmd_req]\n for cmd in cmds:\n exec_cmd(cmd)", "def generate_root_CA():\n\n ##generating root key\n\n root_private_key = rsa.generate_private_key(\n public_exponent=65537,\n key_size=2048,\n backend=default_backend())\n\n\n ##self-sign and generate the root certificate\n\n root_public_key = root_private_key.public_key()\n builder = x509.CertificateBuilder()\n builder = builder.subject_name(x509.Name([\n x509.NameAttribute(NameOID.COMMON_NAME, u'Northeastern SSL Test CA'),\n x509.NameAttribute(NameOID.ORGANIZATION_NAME, u'Northeastern'),\n x509.NameAttribute(NameOID.ORGANIZATIONAL_UNIT_NAME, u'SSL Clock Skews'),\n ]))\n\n builder = builder.issuer_name(x509.Name([\n x509.NameAttribute(NameOID.COMMON_NAME, u'Northeastern SSL Test CA'),\n ]))\n builder = builder.not_valid_before(datetime.datetime.today() - datetime.timedelta(days=1))\n builder = builder.not_valid_after(datetime.datetime(2019, 12, 31))\n builder = builder.serial_number(int(uuid.uuid4()))\n builder = builder.public_key(root_public_key)\n builder = builder.add_extension(\n x509.BasicConstraints(ca=True, path_length=None), critical=True,)\n\n root_certificate = builder.sign(\n private_key=root_private_key, algorithm=hashes.SHA256(),\n backend=default_backend()\n )\n\n\n ##write to disk\n \n\n\n with open(\"rootCA.key\", \"wb\") as f:\n f.write(root_private_key.private_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PrivateFormat.TraditionalOpenSSL,\n encryption_algorithm=serialization.NoEncryption()\n ))\n\n with open(\"rootCA.crt\", \"wb\") as f:\n f.write(root_certificate.public_bytes(\n encoding=serialization.Encoding.PEM,\n ))\n\n return root_private_key, root_certificate", "def request_cert():\n\n api_request = shallow_copy(props)\n\n for key in ['ServiceToken', 'Region', 'Tags', 'Route53RoleArn']:\n api_request.pop(key, None)\n\n if 'ValidationMethod' in props:\n if props['ValidationMethod'] == 'DNS':\n\n # Check that we have all the hosted zone information we need to validate\n # before we create the certificate\n for name in 
set([props['DomainName']] + props.get('SubjectAlternativeNames', [])):\n get_zone_for(name)\n\n del api_request['DomainValidationOptions']\n\n e['PhysicalResourceId'] = acm.request_certificate(\n IdempotencyToken=i_token,\n **api_request\n )['CertificateArn']\n add_tags()", "def generate_key_and_cert():\n signing_key = rsa.generate_private_key(backend=crypto_default_backend(), public_exponent=65537, key_size=2048)\n subject = issuer = x509.Name(\n [\n x509.NameAttribute(NameOID.COUNTRY_NAME, 'NO'),\n x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, 'OSLO'),\n x509.NameAttribute(NameOID.LOCALITY_NAME, 'OSLO'),\n x509.NameAttribute(NameOID.ORGANIZATION_NAME, 'Intility AS'),\n x509.NameAttribute(NameOID.COMMON_NAME, 'intility.no'),\n ]\n )\n signing_cert = (\n x509.CertificateBuilder()\n .subject_name(subject)\n .issuer_name(issuer)\n .public_key(signing_key.public_key())\n .serial_number(x509.random_serial_number())\n .not_valid_before(datetime.utcnow())\n .not_valid_after(\n # Our certificate will be valid for 10 days\n datetime.utcnow()\n + timedelta(days=10)\n # Sign our certificate with our private key\n )\n .sign(signing_key, hashes.SHA256(), crypto_default_backend())\n .public_bytes(crypto_serialization.Encoding.DER)\n )\n return signing_key, signing_cert", "def create_server_certs_enc():\n global server_keystore, config\n\n same_enc_sign_cert = config[\"config\"][\"same_enc_sign_cert\"]\n if same_enc_sign_cert:\n dn = \"/CN=server certificate RSA\"\n else:\n dn = \"/CN=server certificate encryption RSA\"\n key_pair_rsa = create_csr(dn)\n server_keystore[\"key\"] = key_pair_rsa[\"key\"]\n san = [f'URI.1 = {uuid.uuid4().urn}']\n server_keystore[\"crt\"] = sign_csr(key_pair_rsa[\"pub\"], dn, san)", "def make_cert_for_spki_request(spki_req_b64, serial, ident):\n spki_obj = netscape_spki_from_b64(spki_req_b64)\n if spki_obj is None:\n raise ValueError('Invalid SPKI object')\n\n root_crt = _try_load_ca_cert(cfg.ca_cert_path())\n root_key = _try_load_ca_private_key(cfg.ca_private_key_path())\n crt = _make_base_cert(spki_obj.get_pubkey(), 365, ident, serial)\n crt.set_issuer(root_crt.get_subject())\n crt.sign(root_key, 'sha256')\n return crypto.dump_certificate(crypto.FILETYPE_ASN1, crt)", "def req_handler(args):\n key = _get_key(args)\n subject = get_subject_arguments()\n req = create_certificate_request(key, subject=subject, file_name=args.req_out)\n if not args.req_out:\n print(print_certificate_request(req))\n return req", "def create_server_certs_sign():\n global server_keystore\n\n dn_sign = \"/CN=server certificate sign RSA-PSS\"\n key_pair_rsa_sign = create_csr_pss(dn_sign)\n server_keystore[\"key-sign\"] = key_pair_rsa_sign[\"key\"]\n san = [f'URI.1 = {uuid.uuid4().urn}']\n server_keystore[\"crt-sign\"] = sign_csr(key_pair_rsa_sign[\"pub\"], dn_sign, san)", "def create_cert(commonname, ca_dir):\n sca = SimpleCA(ca_dir)\n sca.new_cert(commonname)", "def generate_unsigned_certificate(cn,subjaltname=None,ca=True,ksize=1024):\n\n key = crypto.PKey()\n key.generate_key(crypto.TYPE_RSA, ksize)\n\n cert = crypto.X509()\n cert.set_version(2)\n\n cn = split_cn(cn)\n for k,v in cn.items():\n setattr(cert.get_subject(),k,v)\n\n cert.set_serial_number(get_serial())\n ten_years = 10*365*24*60*60\n cert.gmtime_adj_notBefore(-ten_years)\n cert.gmtime_adj_notAfter(ten_years)\n cert.set_pubkey(key)\n\n extensions = []\n\n if subjaltname:\n extensions.append(crypto.X509Extension(b\"subjectAltName\",False,b\", \".join(subjaltname)))\n else:\n 
extensions.append(crypto.X509Extension(b\"subjectAltName\",False,b\"DNS:%s\" % (cn[\"CN\"].encode(\"utf-8\"),)))\n\n if ca:\n extensions.append(crypto.X509Extension(b'basicConstraints', True, b'CA:TRUE'))\n else:\n extensions.append(crypto.X509Extension(b'basicConstraints', True, b'CA:FALSE'))\n\n extensions.append(crypto.X509Extension(b'extendedKeyUsage', True, b'serverAuth,emailProtection,timeStamping'))\n extensions.append(crypto.X509Extension(b'keyUsage', False, b\"digitalSignature, keyEncipherment\"))\n extensions.append(crypto.X509Extension(b'subjectKeyIdentifier', False, b'hash', subject=cert))\n cert.add_extensions(extensions)\n\n return cert,key", "def sign_certificate_request(csr, rootkey, rootcrt, client_key, domain_name, notBefore, notAfter):\n\n serial_number = int(str(uuid.uuid4().int)[:20])\n crt = x509.CertificateBuilder().subject_name(\n csr.subject\n ).issuer_name(\n rootcrt.subject\n ).public_key(\n csr.public_key()\n ).serial_number(\n serial_number # pylint: disable=no-member\n ).not_valid_before(\n notBefore\n ).not_valid_after(\n notAfter\n ).add_extension(\n extension=x509.KeyUsage(\n digital_signature=True, key_encipherment=True, content_commitment=True,\n data_encipherment=False, key_agreement=False, encipher_only=False, decipher_only=False, key_cert_sign=False, crl_sign=False\n ),\n critical=True\n ).add_extension(\n extension=x509.BasicConstraints(ca=False, path_length=None),\n critical=True\n ).add_extension(\n extension=x509.AuthorityKeyIdentifier.from_issuer_public_key(rootkey.public_key()),\n critical=False\n ).add_extension(\n csr.extensions.get_extension_for_oid(ExtensionOID.SUBJECT_ALTERNATIVE_NAME).value,\n critical=False,\n ).sign(\n private_key=rootkey,\n algorithm=hashes.SHA256(),\n backend=default_backend()\n )\n\n ##storing client's .crt\n with open(domain_name + \".crt\", 'wb') as f:\n f.write(crt.public_bytes(encoding=serialization.Encoding.PEM))", "def __init__(__self__, *,\n certificate: Optional[pulumi.Input[str]] = None,\n csr: Optional[pulumi.Input[str]] = None,\n expires_on: Optional[pulumi.Input[str]] = None,\n hostnames: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n min_days_for_renewal: Optional[pulumi.Input[int]] = None,\n request_type: Optional[pulumi.Input[str]] = None,\n requested_validity: Optional[pulumi.Input[int]] = None):\n if certificate is not None:\n pulumi.set(__self__, \"certificate\", certificate)\n if csr is not None:\n pulumi.set(__self__, \"csr\", csr)\n if expires_on is not None:\n pulumi.set(__self__, \"expires_on\", expires_on)\n if hostnames is not None:\n pulumi.set(__self__, \"hostnames\", hostnames)\n if min_days_for_renewal is not None:\n pulumi.set(__self__, \"min_days_for_renewal\", min_days_for_renewal)\n if request_type is not None:\n pulumi.set(__self__, \"request_type\", request_type)\n if requested_validity is not None:\n pulumi.set(__self__, \"requested_validity\", requested_validity)", "def generate(name, domain, country, state, locale, email,\n keytype, keylength):\n if not domain:\n logger.error(\n \"ctl:info:generate\", \"Choose a fully-qualified domain name of the \"\n \"certificate. 
Must match a domain present on the system\"\n )\n domain = click.prompt(\"Domain name\")\n if not country:\n logger.info(\n \"ctl:cert:generate\",\n \"Two-character country code (ex.: 'US' or 'CA')\"\n )\n country = click.prompt(\"Country code\")\n if not state:\n state = click.prompt(\"State/Province\")\n if not locale:\n locale = click.prompt(\"City/Town/Locale\")\n if not email:\n email = click.prompt(\"Contact email [optional]\")\n try:\n cmd = client().certificates.generate\n job, data = cmd(\n name, domain, country, state, locale, email, keytype, keylength)\n handle_job(job)\n except Exception as e:\n raise CLIException(str(e))", "def handle_simpleenroll(self):\n content_length = int(self.headers['Content-Length'])\n csr = self.rfile.read(content_length)\n\n cert = sign_certificate(csr)\n\n self.set_est_rsp_header(len(cert))\n\n self.wfile.write(cert.encode('utf-8'))", "def create_selfsigned_certificates(name):\n pass", "def __enter__(self):\n self.key = self.make_new_key()\n self.sec_grp, self.group_id = self.make_new_group()\n\n printy(\"New key {} created\".format(self.key.name))\n printy(\"new security group {} created\".format(self.sec_grp.group_name))\n\n return self", "def create_keypair(address_type, addresses_path, address_prefix, name):\n vkey_file = get_vkey_file(addresses_path, address_prefix, name)\n skey_file = get_skey_file(addresses_path, address_prefix, name)\n\n if(path.exists(vkey_file)) :\n print(address_prefix, \"key pair already exists for\", name)\n return\n \n makedirs(path.dirname(vkey_file), mode=0o777, exist_ok=True)\n\n run_params = ['cardano-cli', address_type, 'key-gen', '--verification-key-file', vkey_file, '--signing-key-file', skey_file]\n subprocess_run(run_params, capture_output=False, text=True)\n return", "def main():\n parser = argparse.ArgumentParser(description=\"Wrapper for the GROMACS make_ndx module.\",\n formatter_class=lambda prog: argparse.RawTextHelpFormatter(prog, width=99999))\n parser.add_argument('-c', '--config', required=False, help=\"This file can be a YAML file, JSON file or JSON string\")\n\n # Specific args of each building block\n required_args = parser.add_argument_group('required arguments')\n required_args.add_argument('--input_structure_path', required=True)\n required_args.add_argument('--output_ndx_path', required=True)\n parser.add_argument('--input_ndx_path', required=False)\n\n args = parser.parse_args()\n config = args.config if args.config else None\n properties = settings.ConfReader(config=config).get_prop_dic()\n\n # Specific call of each building block\n make_ndx(input_structure_path=args.input_structure_path,\n output_ndx_path=args.output_ndx_path,\n input_ndx_path=args.input_ndx_path,\n properties=properties)", "def new_cert(self, commonname, extensions=None):\n\n serial = self._get_serial()\n pkey = self._create_pkey(commonname, serial)\n self._create_cert(pkey, commonname, serial, extensions)", "def genStixDoc(\n outputDir_,\n targetFileSha1_,\n targetFileSha256_,\n targetFileSha512_,\n targetFileSsdeep_,\n targetFileMd5_,\n targetFileSize_,\n targetFileName_,\n ipv4Addresses_,\n hostNames_):\n parsedTargetFileName = reFileName(targetFileName_)[1]\n parsedTargetFilePrefix = reFileName(targetFileName_)[0]\n stix.utils.set_id_namespace({\"http://www.nickdriver.com/cuckoo2CRITs\" : \"cuckoo2CRITs\"})\n NS = cybox.utils.Namespace(\"http://www.nickdriver.com/cuckoo2CRITs\", \"cuckoo2CRITs\")\n cybox.utils.set_id_namespace(NS)\n stix_package = STIXPackage()\n\n stix_header = STIXHeader()\n stix_header.title = 
'File: ' + parsedTargetFileName + ' with the associated hashes, network indicators'\n stix_header.description = 'File: ' + parsedTargetFileName + ' with the associated hashes, network indicators'\n stix_package.stix_header = stix_header\n\n #Will take this out later\n # Create the ttp\n malware_instance = MalwareInstance()\n malware_instance.add_name(parsedTargetFileName)\n malware_instance.description = targetFileSha1_\n ttp = TTP(title='TTP: ' + parsedTargetFileName)\n ttp.behavior = Behavior()\n ttp.behavior.add_malware_instance(malware_instance)\n #stix_package.add_ttp(ttp)\n \n #Trying to create an array that will be added later...\n stix_observables = []\n \n #This works - leaving intact until the new portion works\n '''\n # Create the indicator for the ipv4 addresses\n ipv4Object = Address(ipv4Addresses_, Address.CAT_IPV4)\n #stix_msg['stix_observables'].extend(Observables([ipv4Object]))\n stix_observables.extend([ipv4Object])\n '''\n for ip in ipv4Addresses_:\n\t\tipv4Object = Address(ip, Address.CAT_IPV4)\n\t\tstix_observables.extend([ipv4Object])\n \n \n '''\n #This works - leaving intact until the new portion works\n # Create the indicator for the domain names\n domainNameObject = DomainName()\n domainNameObject.value = hostNames_\n '''\n for name in hostNames_:\n\t\tdomainNameObject = DomainName()\n\t\tdomainNameObject.value = name\n\t\tstix_observables.extend([domainNameObject])\n\t\t\n \n\n \n # Create the observable for the file\n fileObject = File()\n fileObject.file_name = parsedTargetFileName\n #fileObject.file_name.condition = 'Equals'\n fileObject.size_in_bytes = targetFileSize_\n #fileObject.size_in_bytes.condition = 'Equals'\n fileObject.add_hash(Hash(targetFileSha1_, type_='SHA1', exact=True))\n fileObject.add_hash(Hash(targetFileSha256_, type_='SHA256', exact=True))\n fileObject.add_hash(Hash(targetFileSha512_, type_='SHA512', exact=True))\n fileObject.add_hash(Hash(targetFileSsdeep_, type_='SSDEEP', exact=True))\n fileObject.add_hash(Hash(targetFileMd5_, type_='MD5', exact=True))\n \n stix_observables.extend([fileObject])\n \n \n stix_package.observables = Observables(stix_observables)\n \n #DEBUG\n #stagedStixDoc = stix_package.to_xml()\n #pp = pprint.PrettyPrinter(indent=4)\n #pp.pprint(stagedStixDoc)\n\t\t\n #print \"stix_observables list\"\n\n #pp.pprint(stix_observables)\n \n '''\n #VERY BASIC STIX ATTEMPT - THIS WORKS!\n a = Address(\"1.2.3.4\", Address.CAT_IPV4)\n d = DomainName()\n d.value = \"cybox.mitre.org\"\n stix_package.observables = Observables([a, d])\n #concensus - Observable does not work - ObservableS does\n '''\n\t\n\t\n\t###UNCOMMENT THIS WHEN DONE###\n\t\n \n stagedStixDoc = stix_package.to_xml()\n stagedStixDoc = fixAddressObject(stagedStixDoc)\n stagedStixDoc = fixDomainObject(stagedStixDoc)\n today = datetime.datetime.now()\n now = today.strftime('%Y-%m-%d_%H%M%S')\n if not os.path.exists(outputDir_):\n os.makedirs(outputDir_)\n with open (outputDir_ + '/' + now + '-' + targetFileSha1_ + '.stix.xml', 'a') as myfile:\n myfile.write(stagedStixDoc)\n _l.debug('Wrote file: ' + now + '-' + targetFileSha1_ + '.stix.xml')\n \n return", "def __init__(__self__, *,\n csr: pulumi.Input[str],\n hostnames: pulumi.Input[Sequence[pulumi.Input[str]]],\n request_type: pulumi.Input[str],\n min_days_for_renewal: Optional[pulumi.Input[int]] = None,\n requested_validity: Optional[pulumi.Input[int]] = None):\n pulumi.set(__self__, \"csr\", csr)\n pulumi.set(__self__, \"hostnames\", hostnames)\n pulumi.set(__self__, \"request_type\", request_type)\n if 
min_days_for_renewal is not None:\n pulumi.set(__self__, \"min_days_for_renewal\", min_days_for_renewal)\n if requested_validity is not None:\n pulumi.set(__self__, \"requested_validity\", requested_validity)", "def create_self_signed_cert():\n\n # create a key pair\n k = crypto.PKey()\n k.generate_key(crypto.TYPE_RSA, 1024)\n\n # create a self-signed cert\n cert = crypto.X509()\n cert.get_subject().C = \"GP\"\n cert.get_subject().ST = \"GRAD_PROJECT\"\n cert.get_subject().L = \"GRAD_PROJECT\"\n cert.get_subject().OU = \"GRAD_PROJECT\"\n cert.get_subject().CN = gethostname()\n cert.set_serial_number(1000)\n cert.gmtime_adj_notBefore(0)\n cert.gmtime_adj_notAfter(10*365*24*60*60)\n cert.set_issuer(cert.get_subject())\n cert.set_pubkey(k)\n cert.sign(k, 'sha1')\n\n cert_file = open(CERT_FILE, \"wb\")\n cert_file.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))\n cert_file.close()\n\n key_file = open(KEY_FILE, \"wb\")\n key_file.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, k))\n key_file.close()", "def create_samfile(self):", "def create_certs(application_name, ip, issuer_name, signing_key):\n logging.info(\"Creating cert for {}\".format(application_name))\n # The IP is used as the CN for backward compatability and as an\n # alternative_name for forward comapability.\n (key, cert) = zaza.openstack.utilities.cert.generate_cert(\n ip,\n issuer_name=ISSUER_NAME,\n alternative_names=[ip],\n signing_key=signing_key)\n APP_CERT_DIR = os.path.join(CERT_DIR, application_name)\n if not os.path.exists(APP_CERT_DIR):\n os.makedirs(APP_CERT_DIR)\n write_cert(APP_CERT_DIR, 'cert.pem', cert)\n write_cert(APP_CERT_DIR, 'cert.key', key)", "def _generate_certificates(certfile_path: str, keyfile_path: str,\n common_name: str) -> None:\n ca_key = OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_PEM,\n _CA_KEY)\n ca_cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM,\n _CA_CERT)\n\n k = OpenSSL.crypto.PKey()\n k.generate_key(OpenSSL.crypto.TYPE_RSA, 4096)\n\n cert = OpenSSL.crypto.X509()\n cert.get_subject().C = 'US'\n cert.get_subject().CN = common_name\n cert.set_serial_number(random.randint(0, 2**64))\n cert.gmtime_adj_notBefore(0)\n cert.gmtime_adj_notAfter(10 * 365 * 24 * 60 * 60)\n cert.set_issuer(ca_cert.get_subject())\n cert.set_pubkey(k)\n cert.sign(ca_key, 'sha512')\n with open(certfile_path, \"w\") as f:\n f.write(\n OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM,\n cert).decode(\"utf-8\"))\n f.write(_CA_CERT)\n with open(keyfile_path, \"w\") as f:\n f.write(\n OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM,\n k).decode(\"utf-8\"))", "def do_sign(subject_csr, issuer_obj, issuer_key, days, path_length, reqInfo, reset_info=None):\n # Certificate duration\n if days is None:\n die(\"Need --days\")\n if days <= 0:\n die(\"Invalid --days\")\n\n # Load CA info\n issuer_info = CertInfo(load=issuer_obj)\n\n # Load certificate request\n subject_info = CertInfo(load=subject_csr)\n if reset_info:\n subject_info = reset_info\n\n # Check CA parameters\n if not same_pubkey(subject_csr, issuer_obj):\n if not issuer_info.ca:\n die(\"Issuer must be CA.\")\n if 'key_cert_sign' not in issuer_info.usage:\n die(\"Issuer CA is not allowed to sign certs.\")\n if subject_info.ca:\n if not same_pubkey(subject_csr, issuer_obj):\n # not self-signing, check depth\n if issuer_info.path_length == 0:\n die(\"Issuer cannot sign sub-CAs\")\n if issuer_info.path_length - 1 < path_length:\n die(\"--path-length not allowed by issuer\")\n\n # Load subject's public key, check 
sanity\n pkey = subject_csr.public_key()\n if isinstance(pkey, ec.EllipticCurvePublicKey):\n pkeyinfo = 'ec:' + str(pkey.curve.name)\n if pkey.curve.name not in EC_CURVES:\n die(\"Curve not allowed: %s\", pkey.curve.name)\n elif isinstance(pkey, rsa.RSAPublicKey):\n pkeyinfo = 'rsa:' + str(pkey.key_size)\n if pkey.key_size < MIN_RSA_BITS or pkey.key_size > MAX_RSA_BITS:\n die(\"RSA size not allowed: %s\", pkey.key_size)\n else:\n die(\"Unsupported public key: %s\", str(pkey))\n\n # Report\n if subject_info.ca:\n msg('Signing CA cert [%s] - %s', pkeyinfo, reqInfo)\n else:\n msg('Signing end-entity cert [%s] - %s', pkeyinfo, reqInfo)\n msg('Issuer name: %s', render_name(issuer_info.subject))\n msg('Subject:')\n subject_info.show(msg_show)\n\n # Load CA private key\n if not same_pubkey(issuer_key, issuer_obj):\n die(\"--ca-private-key does not match --ca-info data\")\n\n # Stamp request\n cert = create_x509_cert(issuer_key, subject_csr.public_key(), subject_info, issuer_info, days=days)\n return cert", "def csr(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"csr\")", "def acme_certificate(self, csr_der):\n logging.info(\"Preparing and sending CSR...\")\n return self.network.send_and_receive_expected(\n messages.CertificateRequest.create(\n csr=jose.ComparableX509(\n M2Crypto.X509.load_request_der_string(csr_der)),\n key=jose.HashableRSAKey(Crypto.PublicKey.RSA.importKey(\n self.authkey.pem))),\n messages.Certificate)", "def main():\n # This have specific paths to prevent abitrary binaries from being\n # executed. The \"gsi\"* utilities are configured to use either grid proxies\n # or ssh, automatically.\n remoteLoginCmd = \"/usr/bin/gsissh\"\n remoteCopyCmd = \"/usr/bin/gsiscp\"\n\n UNKNOWN_PLATFORM_EXIT_CODE = 10\n MISSING_PBS_CONFIG_EXIT_CODE = 20\n\n p = AllocatorParser(sys.argv[0])\n platform = p.getPlatform()\n\n creator = Allocator(platform, p.getArgs(), \"$HOME/.lsst/condor-info.py\")\n\n platformPkgDir = lsst.utils.getPackageDir(\"ctrl_platform_\"+platform)\n configName = os.path.join(platformPkgDir, \"etc\", \"config\", \"pbsConfig.py\")\n execConfigName = os.path.join(platformPkgDir, \"etc\", \"config\", \"execConfig.py\")\n\n creator.load(execConfigName)\n\n creator.loadPbs(configName)\n\n verbose = creator.isVerbose()\n \n pbsName = os.path.join(platformPkgDir, \"etc\", \"templates\", \"generic.pbs.template\")\n generatedPbsFile = creator.createPbsFile(pbsName)\n\n condorFile = os.path.join(platformPkgDir, \"etc\", \"templates\", \"glidein_condor_config.template\")\n generatedCondorConfigFile = creator.createCondorConfigFile(condorFile)\n\n scratchDirParam = creator.getScratchDirectory()\n template = Template(scratchDirParam)\n scratchDir = template.substitute(USER_HOME=creator.getUserHome())\n userName = creator.getUserName()\n \n hostName = creator.getHostName()\n\n utilityPath = creator.getUtilityPath()\n\n #\n # execute copy of PBS file to XSEDE node\n #\n cmd = \"%s %s %s@%s:%s/%s\" % (remoteCopyCmd, generatedPbsFile, userName, hostName, scratchDir, os.path.basename(generatedPbsFile))\n if verbose:\n print cmd\n exitCode = runCommand(cmd, verbose)\n if exitCode != 0:\n print \"error running %s to %s.\" % (remoteCopyCmd, hostName)\n sys.exit(exitCode)\n\n #\n # execute copy of Condor config file to XSEDE node\n #\n cmd = \"%s %s %s@%s:%s/%s\" % (remoteCopyCmd, generatedCondorConfigFile, userName, hostName, scratchDir, os.path.basename(generatedCondorConfigFile))\n if verbose:\n print cmd\n exitCode = runCommand(cmd, verbose)\n if exitCode != 0:\n print \"error running 
%s to %s.\" % (remoteCopyCmd, hostName)\n sys.exit(exitCode)\n\n #\n # execute qsub command on XSEDE node to perform Condor glide-in\n #\n cmd = \"%s %s@%s %s/qsub %s/%s\" % (remoteLoginCmd, userName, hostName, utilityPath, scratchDir, os.path.basename(generatedPbsFile))\n if verbose:\n print cmd\n exitCode = runCommand(cmd, verbose)\n if exitCode != 0:\n print \"error running %s to %s.\" % (remoteLoginCmd, hostName)\n sys.exit(exitCode)\n\n nodes = creator.getNodes()\n slots = creator.getSlots()\n wallClock = creator.getWallClock()\n nodeString = \"\"\n if int(nodes) > 1:\n nodeString = \"s\"\n print \"%s node%s will be allocated on %s with %s slots per node and maximum time limit of %s\" % (nodes, nodeString, platform, slots, wallClock)\n print \"Node set name:\"\n print creator.getNodeSetName()\n sys.exit(0)", "def generate_selfsigned_ca(clustername):\n\n from datetime import datetime, timedelta\n import ipaddress\n\n from cryptography import x509\n from cryptography.x509.oid import NameOID\n from cryptography.hazmat.primitives import hashes\n from cryptography.hazmat.backends import default_backend\n from cryptography.hazmat.primitives import serialization\n from cryptography.hazmat.primitives.asymmetric import rsa\n \n # Generate key\n key = rsa.generate_private_key(\n public_exponent=65537,\n key_size=2048,\n backend=default_backend(),\n )\n \n name = x509.Name([\n x509.NameAttribute(NameOID.COMMON_NAME, unicode(clustername))\n ])\n \n # path_len=1 means that this certificate can sign one level of sub-certs\n basic_contraints = x509.BasicConstraints(ca=True, path_length=1)\n now = datetime.utcnow()\n cert = (\n x509.CertificateBuilder()\n .subject_name(name)\n .issuer_name(name)\n .public_key(key.public_key())\n .serial_number(1)\n .not_valid_before(now)\n .not_valid_after(now + timedelta(days=10*365))\n .add_extension(basic_contraints, False)\n .sign(key, hashes.SHA256(), default_backend())\n )\n\n cert_pem = cert.public_bytes(encoding=serialization.Encoding.PEM)\n\n key_pem = key.private_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PrivateFormat.TraditionalOpenSSL,\n encryption_algorithm=serialization.NoEncryption(),\n )\n\n return cert_pem, key_pem", "def keygen(\n args: argparse.Namespace,\n config: KSKMConfig,\n p11modules: KSKM_P11,\n logger: logging.Logger,\n) -> bool:\n logger.info(\"Generate key\")\n flags = FlagsDNSKEY.ZONE.value | FlagsDNSKEY.SEP.value\n dnssec_alg = AlgorithmDNSSEC[args.key_alg]\n if is_algorithm_rsa(dnssec_alg):\n if args.key_size is None:\n raise argparse.ArgumentError(\n args.key_size, \"RSA key generation requires key size\"\n )\n p11key = generate_rsa_key(\n flags, args.key_size, p11modules, label=args.key_label\n )\n elif is_algorithm_ecdsa(dnssec_alg):\n crv = algorithm_to_curve(dnssec_alg)\n p11key = generate_ec_key(flags, crv, p11modules, label=args.key_label)\n else:\n raise ValueError(f\"Unknown key algorithm {repr(args.key_alg)}\")\n\n if not p11key or not p11key.public_key:\n raise RuntimeError(\"No public key returned by key generation\")\n\n # Calculate the DNSSEC key tag of the new key and look for a collision in the configuration\n key_tags: List[int] = []\n _key = public_key_to_dnssec_key(\n key=p11key.public_key,\n key_identifier=p11key.label,\n algorithm=AlgorithmDNSSEC[args.key_alg],\n flags=FlagsDNSKEY.SEP.value | FlagsDNSKEY.ZONE.value,\n ttl=config.ksk_policy.ttl,\n )\n logger.info(\n f\"Generated key {p11key.label} has key tag {_key.key_tag} for algorithm={_key.algorithm}, \"\n f\"flags=0x{_key.flags:x}\"\n 
)\n key_tags += [_key.key_tag]\n _revoked_key = public_key_to_dnssec_key(\n key=p11key.public_key,\n key_identifier=p11key.label,\n algorithm=AlgorithmDNSSEC[args.key_alg],\n flags=FlagsDNSKEY.SEP.value | FlagsDNSKEY.ZONE.value | FlagsDNSKEY.REVOKE.value,\n ttl=config.ksk_policy.ttl,\n )\n logger.info(\n f\"Generated key {p11key.label} has key tag {_revoked_key.key_tag} with the REVOKE bit set \"\n f\"(flags 0x{_revoked_key.flags:x})\"\n )\n key_tags += [_revoked_key.key_tag]\n\n for _name, ksk in config.ksk_keys.items():\n if ksk.key_tag in key_tags:\n logger.error(\n f\"Generated key {p11key.label} has key tags {key_tags} matching \"\n f\"KSK key in configuration: {ksk}\"\n )\n raise RuntimeError(\"Key tag collision detected\")\n\n _now = datetime.utcnow()\n # create_trustanchor_keydigest wants an KSKKey, but it is not used in the digest calculation\n _temp_ksk = KSKKey(\n description=\"Newly generated key\",\n label=_now.isoformat(),\n key_tag=_key.key_tag,\n algorithm=_key.algorithm,\n valid_from=_now,\n valid_until=_now,\n )\n _domain = \".\"\n _ds = create_trustanchor_keydigest(_temp_ksk, _key, domain=_domain)\n digest = binascii.hexlify(_ds.digest).decode(\"UTF-8\").upper()\n _digest_type = \"2\" # create_trustanchor_keydigest always does SHA256\n logger.info(\n f\"DS record for generated key:\\n\"\n f\"{_domain} IN DS {_key.key_tag} {_key.algorithm.value} {_digest_type} {digest}\\n\"\n f\">> {' '.join(pgp_wordlist(_ds.digest))}\"\n )\n\n return True", "def _issue(cls, keypair, subject_key, serial, sia, aia, crldp, notAfter,\n cn, sn, resources, is_ca, aki, issuer_name, notBefore, eku):\n\n if sia is not None:\n assert len(sia) == 4 and sia[3]\n sia = tuple((str(s),) if isinstance(s, (str, unicode)) else s for s in sia)\n\n now = rpki.sundial.now()\n ski = subject_key.get_SKI()\n\n if notBefore is None:\n notBefore = now\n\n if cn is None:\n cn = \"\".join((\"%02X\" % ord(i) for i in ski))\n\n if now >= notAfter:\n raise rpki.exceptions.PastNotAfter(\"notAfter value %s is already in the past\" % notAfter)\n\n if notBefore >= notAfter:\n raise rpki.exceptions.NullValidityInterval(\"notAfter value %s predates notBefore value %s\" %\n (notAfter, notBefore))\n\n cert = rpki.POW.X509()\n\n cert.setVersion(2)\n cert.setSerial(serial)\n cert.setIssuer(issuer_name.get_POW())\n cert.setSubject(X501DN.from_cn(cn, sn).get_POW())\n cert.setNotBefore(notBefore)\n cert.setNotAfter(notAfter)\n cert.setPublicKey(subject_key.get_POW())\n cert.setSKI(ski)\n cert.setAKI(aki)\n cert.setCertificatePolicies((rpki.oids.id_cp_ipAddr_asNumber,))\n\n if crldp is not None:\n cert.setCRLDP((crldp,))\n\n if aia is not None:\n cert.setAIA((aia,))\n\n if is_ca:\n cert.setBasicConstraints(True, None)\n cert.setKeyUsage(frozenset((\"keyCertSign\", \"cRLSign\")))\n\n else:\n cert.setKeyUsage(frozenset((\"digitalSignature\",)))\n\n assert sia is not None or not is_ca\n\n if sia is not None:\n caRepository, rpkiManifest, signedObject, rpkiNotify = sia\n cert.setSIA(\n (caRepository,) if isinstance(caRepository, str) else caRepository,\n (rpkiManifest,) if isinstance(rpkiManifest, str) else rpkiManifest,\n (signedObject,) if isinstance(signedObject, str) else signedObject,\n (rpkiNotify,) if isinstance(rpkiNotify, str) else rpkiNotify)\n\n if resources is not None:\n cert.setRFC3779(\n asn = (\"inherit\" if resources.asn.inherit else\n ((r.min, r.max) for r in resources.asn)),\n ipv4 = (\"inherit\" if resources.v4.inherit else\n ((r.min, r.max) for r in resources.v4)),\n ipv6 = (\"inherit\" if resources.v6.inherit 
else\n ((r.min, r.max) for r in resources.v6)))\n\n if eku is not None:\n assert not is_ca\n cert.setEKU(eku)\n\n cert.sign(keypair.get_POW(), rpki.POW.SHA256_DIGEST)\n\n return cls(POW = cert)", "def csc():\n endcaps = [1,2]\n disks = [1,2,3,4]\n rings = {1:[1,2,3], # different rings for different disks\n 2:[1,2], \n 3:[1,2],\n 4:[1,2]}\n\n csc_info = {\n \"endcaps\":endcaps,\n \"disks\": disks,\n \"rings\": rings}\n\n return csc_info", "def createCertRequest(pkey, digest=\"sha256\", **name):\n req = crypto.X509Req()\n subj = req.get_subject()\n\n for key, value in name.items():\n setattr(subj, key, value)\n\n req.set_pubkey(pkey)\n req.sign(pkey, digest)\n return req", "def _create_Address_Validation_Request_xml(self,cr, uid, data_for_Address_Validation_Request,data_for_Access_Request):\n doc1 = Document()\n AccessRequest = doc1.createElement(\"AccessRequestxml\")\n AccessRequest.setAttribute(\"xml:lang\", \"en-US\")\n doc1.appendChild(AccessRequest)\n\n AccessLicenseNumber = doc1.createElement(\"AccessLicenseNumber\")\n ptext = doc1.createTextNode(data_for_Access_Request[\"AccessLicenseNumber\"])\n AccessLicenseNumber.appendChild(ptext)\n AccessRequest.appendChild(AccessLicenseNumber)\n\n UserId = doc1.createElement(\"UserId\")\n ptext = doc1.createTextNode(data_for_Access_Request[\"UserId\"])\n UserId.appendChild(ptext)\n AccessRequest.appendChild(UserId)\n\n Password = doc1.createElement(\"Password\")\n ptext = doc1.createTextNode(data_for_Access_Request[\"Password\"])\n Password.appendChild(ptext)\n AccessRequest.appendChild(Password)\n\n doc = Document()\n\n #creating AddressValidationRequest tag\n AddressValidationRequest = doc.createElement(\"AddressValidationRequest\")\n AddressValidationRequest.setAttribute(\"xml:lang\", \"en-US\")\n doc.appendChild(AddressValidationRequest)\n\n #creating Request tag XMLpath=/AddressValidationRequest/Request\n Request = doc.createElement(\"Request\")\n AddressValidationRequest.appendChild(Request)\n\n #creating TransactionReference tag XMLpath=AddressValidationRequest/Request/TransactionReference\n TransactionReference = doc.createElement(\"TransactionReference\")\n Request.appendChild(TransactionReference)\n\n #creating CustomerContext tag XMLpath=/AddressValidationRequest/Request/TransactionReference/CustomerContext\n CustomerContext = doc.createElement(\"CustomerContext\")\n ptext = doc.createTextNode(data_for_Address_Validation_Request['Request']['Transaction Reference'][\"CustomerContext\"])\n CustomerContext.appendChild(ptext)\n TransactionReference.appendChild(CustomerContext)\n\n #creating XpciVersion tag XMLpath=AddressValidationRequest/Request/TransactionReference/XpciVersion\n XpciVersion = doc.createElement(\"XpciVersion\")\n ptext = doc.createTextNode(data_for_Address_Validation_Request['Request']['Transaction Reference'][\"XpciVersion\"])\n XpciVersion.appendChild(ptext)\n TransactionReference.appendChild(XpciVersion)\n\n #creating ToolVersion tag XMLpath=AddressValidationRequest/Request/TransactionReference/ToolVersion\n ToolVersion = doc.createElement(\"ToolVersion\")\n ptext = doc.createTextNode(data_for_Address_Validation_Request['Request']['Transaction Reference'][\"ToolVersion\"])\n ToolVersion.appendChild(ptext)\n TransactionReference.appendChild(ToolVersion)\n\n #creating RequestAction tag XMLpath=AddressValidationRequest/Request/RequestAction\n RequestAction = doc.createElement(\"RequestAction\")\n ptext = doc.createTextNode(data_for_Address_Validation_Request['Request'][\"RequestAction\"])\n 
RequestAction.appendChild(ptext)\n Request.appendChild(RequestAction)\n\n #creating RequestOption tag XMLpath=AddressValidationRequest/Request/RequestOption\n RequestOption = doc.createElement(\"RequestOption\")\n ptext = doc.createTextNode(data_for_Address_Validation_Request['Request'][\"RequestOption\"])\n RequestOption.appendChild(ptext)\n Request.appendChild(RequestOption)\n\n #creating RequestOption tag XMLpath=AddressValidationRequest/MaximumListSize\n MaximumListSize = doc.createElement(\"MaximumListSize\")\n ptext = doc.createTextNode(data_for_Address_Validation_Request[\"MaximumListSize\"])\n MaximumListSize.appendChild(ptext)\n AddressValidationRequest.appendChild(MaximumListSize)\n\n #creating AddressKeyFormat tag XMLpath=AddressValidationRequest/AddressKeyFormat\n AddressKeyFormat = doc.createElement(\"AddressKeyFormat\")\n AddressValidationRequest.appendChild(AddressKeyFormat)\n\n #creating ConsigneeName tag XMLpath=AddressValidationRequest/AddressKeyFormat/ConsigneeName\n ConsigneeName = doc.createElement(\"ConsigneeName\")\n ptext = doc.createTextNode(data_for_Address_Validation_Request['AddressKeyFormat'][\"ConsigneeName\"])\n ConsigneeName.appendChild(ptext)\n AddressKeyFormat.appendChild(ConsigneeName)\n\n #creating BuildingName tag XMLpath=AddressValidationRequest/AddressKeyFormat/BuildingName\n BuildingName = doc.createElement(\"BuildingName\")\n ptext = doc.createTextNode(data_for_Address_Validation_Request['AddressKeyFormat'][\"BuildingName\"])\n BuildingName.appendChild(ptext)\n BuildingName.appendChild(BuildingName)\n\n #creating AddressLine tag XMLpath=AddressValidationRequest/AddressKeyFormat/AddressLine\n AddressLine = doc.createElement(\"AddressLine\")\n ptext = doc.createTextNode(data_for_Address_Validation_Request['AddressKeyFormat'][\"AddressLine1\"])\n AddressLine.appendChild(ptext)\n AddressKeyFormat.appendChild(AddressLine)\n\n AddressLine = doc.createElement(\"AddressLine\")\n ptext = doc.createTextNode(data_for_Address_Validation_Request['AddressKeyFormat'][\"AddressLine2\"])\n AddressLine.appendChild(ptext)\n AddressKeyFormat.appendChild(AddressLine)\n\n #creating Region tag XMLpath=AddressValidationRequest/AddressKeyFormat/Region\n Region = doc.createElement(\"Region\")\n ptext = doc.createTextNode(data_for_Address_Validation_Request['AddressKeyFormat'][\"Region\"])\n Region.appendChild(ptext)\n AddressKeyFormat.appendChild(Region)\n\n #creating PoliticalDivision2 tag XMLpath=AddressValidationRequest/AddressKeyFormat/PoliticalDivision2\n PoliticalDivision2 = doc.createElement(\"PoliticalDivision2\")\n ptext = doc.createTextNode(data_for_Address_Validation_Request['AddressKeyFormat'][\"PoliticalDivision2\"])\n PoliticalDivision2.appendChild(ptext)\n AddressKeyFormat.appendChild(PoliticalDivision2)\n\n #creating PoliticalDivision1 tag XMLpath=AddressValidationRequest/AddressKeyFormat/PoliticalDivision1\n PoliticalDivision1 = doc.createElement(\"PoliticalDivision1\")\n ptext = doc.createTextNode(data_for_Address_Validation_Request['AddressKeyFormat'][\"PoliticalDivision1\"])\n PoliticalDivision1.appendChild(ptext)\n AddressKeyFormat.appendChild(PoliticalDivision1)\n\n #creating PostcodePrimaryLow tag XMLpath=AddressValidationRequest/AddressKeyFormat/PostcodePrimaryLow\n PostcodePrimaryLow = doc.createElement(\"PostcodePrimaryLow\")\n ptext = doc.createTextNode(data_for_Address_Validation_Request['AddressKeyFormat'][\"PostcodePrimaryLow\"])\n PostcodePrimaryLow.appendChild(ptext)\n AddressKeyFormat.appendChild(PostcodePrimaryLow)\n\n #creating 
PostcodeExtendedLow tag XMLpath=AddressValidationRequest/AddressKeyFormat/PostcodeExtendedLow\n PostcodeExtendedLow = doc.createElement(\"PostcodeExtendedLow\")\n ptext = doc.createTextNode(data_for_Address_Validation_Request['AddressKeyFormat'][\"PostcodeExtendedLow\"])\n PostcodeExtendedLow.appendChild(ptext)\n AddressKeyFormat.appendChild(PostcodeExtendedLow)\n\n #creating PostcodeExtendedLow tag XMLpath=AddressValidationRequest/AddressKeyFormat/Urbanization\n Urbanization = doc.createElement(\"Urbanization\")\n ptext = doc.createTextNode(data_for_Address_Validation_Request['AddressKeyFormat'][\"Urbanization\"])\n Urbanization.appendChild(ptext)\n AddressKeyFormat.appendChild(Urbanization)\n\n #creating CountryCode tag XMLpath=AddressValidationRequest/AddressKeyFormat/CountryCode\n CountryCode = doc.createElement(\"CountryCode\")\n ptext = doc.createTextNode(data_for_Address_Validation_Request['AddressKeyFormat'][\"CountryCode\"])\n CountryCode.appendChild(ptext)\n AddressKeyFormat.appendChild(CountryCode)\n\n Request_string1=doc1.toprettyxml()\n Request_string2=doc.toprettyxml()\n Request_string=Request_string1+Request_string2\n return Request_string", "def new_X509( # pylint: disable=invalid-name\n country_name: str = \"US\",\n state_or_province_name: str = \"New York\",\n locality: str = \"New York\",\n organization_name: str = \"mitm\",\n organization_unit_name: str = \"mitm\",\n common_name: str = \"mitm\",\n serial_number: Optional[int] = None,\n time_not_before: int = 0, # 0 means now.\n time_not_after: int = 1 * (365 * 24 * 60 * 60), # 1 year.\n) -> OpenSSL.crypto.X509:\n\n cert = OpenSSL.crypto.X509()\n cert.get_subject().C = country_name\n cert.get_subject().ST = state_or_province_name\n cert.get_subject().L = locality\n cert.get_subject().O = organization_name\n cert.get_subject().OU = organization_unit_name\n cert.get_subject().CN = common_name\n cert.set_serial_number(serial_number or random.randint(0, 2**64 - 1))\n cert.set_version(2)\n cert.gmtime_adj_notBefore(time_not_before)\n cert.gmtime_adj_notAfter(time_not_after)\n cert.set_issuer(cert.get_subject())\n return cert", "def create():", "def create():", "def genKeys():\r\n (pub, priv) = rsa.newkeys(256)\r\n context = {\r\n 'pub': pub,\r\n 'priv': priv\r\n }\r\n return context", "def check_valid_request_ca(self):\n\n self.check_valid_request_common()\n\n alg = self.get_POW().getSignatureAlgorithm()\n bc = self.get_POW().getBasicConstraints()\n eku = self.get_POW().getEKU()\n sia = self.get_POW().getSIA()\n\n if alg != rpki.oids.sha256WithRSAEncryption:\n raise rpki.exceptions.BadPKCS10(\"PKCS #10 has bad signature algorithm for CA: %s\" % alg)\n\n if bc is None or not bc[0] or bc[1] is not None:\n raise rpki.exceptions.BadPKCS10(\"PKCS #10 CA bad basicConstraints\")\n\n if eku is not None:\n raise rpki.exceptions.BadPKCS10(\"PKCS #10 CA EKU not allowed\")\n\n if sia is None:\n raise rpki.exceptions.BadPKCS10(\"PKCS #10 CA SIA missing\")\n\n caRepository, rpkiManifest, signedObject, rpkiNotify = sia\n\n logger.debug(\"check_valid_request_ca(): sia: %r\", sia)\n\n if signedObject:\n raise rpki.exceptions.BadPKCS10(\"PKCS #10 CA SIA must not have id-ad-signedObject\")\n\n if not caRepository:\n raise rpki.exceptions.BadPKCS10(\"PKCS #10 CA SIA must have id-ad-caRepository\")\n\n if not any(uri.startswith(\"rsync://\") for uri in caRepository):\n raise rpki.exceptions.BadPKCS10(\"PKCS #10 CA SIA id-ad-caRepository contains no rsync URIs\")\n\n if any(uri.startswith(\"rsync://\") and not uri.endswith(\"/\") for uri in 
caRepository):\n raise rpki.exceptions.BadPKCS10(\"PKCS #10 CA SIA id-ad-caRepository does not end with slash\")\n\n if not rpkiManifest:\n raise rpki.exceptions.BadPKCS10(\"PKCS #10 CA SIA must have id-ad-rpkiManifest\")\n\n if not any(uri.startswith(\"rsync://\") for uri in rpkiManifest):\n raise rpki.exceptions.BadPKCS10(\"PKCS #10 CA SIA id-ad-rpkiManifest contains no rsync URIs\")\n\n if any(uri.startswith(\"rsync://\") and uri.endswith(\"/\") for uri in rpkiManifest):\n raise rpki.exceptions.BadPKCS10(\"PKCS #10 CA SIA id-ad-rpkiManifest ends with slash\")\n\n if any(not uri.startswith(\"http://\") and not uri.startswith(\"https://\") for uri in rpkiNotify):\n raise rpki.exceptions.BadPKCS10(\"PKCS #10 CA SIA id-ad-rpkiNotify neither HTTP nor HTTPS\")", "def Certificate(self) -> _n_8_t_0:", "def Certificate(self) -> _n_8_t_0:", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n csr: Optional[pulumi.Input[str]] = None,\n hostnames: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n min_days_for_renewal: Optional[pulumi.Input[int]] = None,\n request_type: Optional[pulumi.Input[str]] = None,\n requested_validity: Optional[pulumi.Input[int]] = None,\n __props__=None):\n ...", "def create_server_certs():\n global server_key_files, server_keystore, config\n\n same_enc_sign_cert = config[\"config\"][\"same_enc_sign_cert\"]\n if not Path(server_key_files[\"key\"]).is_file() or not Path(server_key_files[\"crt\"]).is_file():\n print(\"create new encryption cert\\n\")\n create_server_certs_enc()\n for f_item in [\"key\", \"crt\"]:\n with open(server_key_files[f_item], \"w\") as f:\n f.write(server_keystore[f_item])\n f.close()\n else:\n for f_item in [\"key\", \"crt\"]:\n with open(server_key_files[f_item], \"r\") as f:\n server_keystore[f_item] = f.read()\n f.close()\n\n server_keystore[\"key-sign\"] = server_keystore[\"key\"]\n server_keystore[\"crt-sign\"] = server_keystore[\"crt\"]\n\n if not Path(server_key_files[\"key-sign\"]).is_file() or not Path(server_key_files[\"crt-sign\"]).is_file():\n print(\"create new signing cert\\n\")\n if not same_enc_sign_cert:\n create_server_certs_sign()\n for f_item in [\"key-sign\", \"crt-sign\"]:\n with open(server_key_files[f_item], \"w\") as f:\n f.write(server_keystore[f_item])\n f.close()\n else:\n for f_item in [\"key-sign\", \"crt-sign\"]:\n with open(server_key_files[f_item], \"r\") as f:\n server_keystore[f_item] = f.read()\n f.close()", "def CSR_ADD( A, B ):\n # sort indices\n if not A.has_sorted_indices:\n A.sort_indices()\n if not B.has_sorted_indices:\n B.sort_indices\n\n CSRA = mkl.mkl_dcsradd\n\n shift = 1\n\n # variables relating to a, b, beta\n na = A.count_nonzero()\n sha = A.shape\n m = pointer(c_int(sha[0]))\n n = pointer(c_int(sha[1]))\n a = A.data.ctypes.data_as(POINTER(c_double))\n _ja = (A.indices + shift).astype(np.int32)\n ja = _ja.ctypes.data_as(POINTER(c_int))\n _ia = (A.indptr + shift).astype(np.int32)\n ia = _ia.ctypes.data_as(POINTER(c_int))\n\n beta = byref(c_double(1.0))\n\n b = B.data.ctypes.data_as(POINTER(c_double))\n _jb = (B.indices + shift).astype(np.int32)\n jb = _jb.ctypes.data_as(POINTER(c_int))\n _ib = (B.indptr + shift).astype(np.int32)\n ib = _ib.ctypes.data_as(POINTER(c_int))\n nzmax = byref(c_int(0))\n\n # dummy output variables for first call\n c = np.empty(1, dtype=np.float64)\n jc = np.empty(1, dtype=np.int32)\n ic = np.empty(sha[0] + 1, dtype=np.int32)\n pc = c.ctypes.data_as(POINTER(c_double))\n pjc = jc.ctypes.data_as(POINTER(c_int))\n pic = 
ic.ctypes.data_as(POINTER(c_int))\n\n # setup variables for MKL call\n trans = pointer(c_char(b\"N\"))\n request = pointer(c_int(1))\n sort = pointer(c_int(3))\n info = pointer(c_int(0))\n\n # call once to compute number of values in the ouput\n CSRA(trans, request, sort, m, n, a, ja, ia, beta, b, jb, ib, \\\n pc, pjc, pic, nzmax, info)\n\n # allocate memory\n nc = pic[m[0]] - 1\n c = np.empty(nc, dtype=np.float64)\n jc = np.empty(nc, dtype=np.int32)\n pc = c.ctypes.data_as(POINTER(c_double))\n pjc = jc.ctypes.data_as(POINTER(c_int))\n request = pointer(c_int(2))\n sort = pointer(c_int(0))\n info = pointer(c_int(0))\n\n # call once more to compute sum\n CSRA(trans, request, sort, m, n, a, ja, ia, beta, b, jb, ib, \\\n pc, pjc, pic, nzmax, info)\n\n # construct matrix from the data\n return sparse.csr_matrix((c, jc-shift, ic-shift), shape=sha)", "def CreateRequests(self, args):\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, self.resources)\n certificate = file_utils.ReadFile(args.certificate, 'certificate')\n private_key = file_utils.ReadFile(args.private_key, 'private key')\n\n request = self.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=self.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n project=self.project)\n\n return [request]", "def create_sandesh_req(self, obj, filename):\n msghdr = \"<?xml version=\\\"1.0\\\"?><test><test_name> \" + \\\n \"sandesh req</test_name><message>\"\n msgfooter = \"</message></test>\"\n # open the file\n with open(filename, 'w') as fh:\n try:\n # write msg hdr\n fh.write(msghdr)\n # write sandesh xml output of the obj\n obj.write(self.get_xml_proto_file_handle(fh))\n fh.write(msgfooter)\n except Exception as e:\n self.logger.info(\"Failed to write sandesh req file\")\n self.logger.info(e)\n try:\n self.replace_sandesh_obj_name(obj, filename)\n subprocess.call(\"xmllint --format \" + filename +\n \" --output \" + filename, shell=True)\n except Exception as e:\n self.logger.error(\"Failed to format xml output\")", "def create_sparse_matrix(self, filename, matrix_length, density):\n pos = 0\n aux_pos = 0\n matrix = []\n pointerB = []\n pointerE = []\n columns = []\n values = []\n\n for i in range(0, matrix_length):\n row = []\n pointerB.append(pos)\n aux_pos = pos\n for j in range(0, matrix_length):\n probability = random.random()\n if probability < density:\n pos += 1\n val = random.randint(1, 10)\n values.append(val)\n columns.append(j)\n else:\n val = 0\n row.append(val)\n matrix.append(row)\n pointerE.append(pos)\n vector_b = SparseMatrix.gen_vector(matrix_length)\n matrix_A = np.matrix(matrix)\n vector_res = np.dot(matrix_A, vector_b).reshape(matrix_length, 1)\n data = {\"values\": values, \"columns\": columns, \"pointerB\": pointerB, \"pointerE\": pointerE}\n CSR_A = json.dumps(data)\n '''\n print(\"x: \", vector_x)\n print(\"A: \", matrix_A)\n print(\"b: \", vector_b)\n data = {\"values\": values, \"columns\": columns, \"pointerB\": pointerB, \"pointerE\": pointerE}\n data_json = json.dumps(data)\n file = open(filename, 'w')\n file.write(data_json)\n file.close()\n np.savetxt(\"vector.txt\", vector_x, fmt=\"%1.9f\", delimiter=\" \")\n '''\n return matrix_A, CSR_A, vector_b, vector_res", "def csr(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"csr\")", "def create_cert(self, cert_file, key_file):\n if os.path.isfile(cert_file) and os.path.isfile(key_file):\n return cert_file, key_file\n\n k = 
crypto.PKey()\n k.generate_key(crypto.TYPE_RSA, 2048)\n cert = crypto.X509()\n cert.get_subject().C = \"US\"\n cert.get_subject().ST = \"CO\"\n cert.get_subject().L = \"Denver\"\n cert.get_subject().CN = gethostname()\n cert.get_subject().O = \"Metropolitan State University of Denver\"\n cert.get_subject().OU = \"Computer Science\"\n cert.set_serial_number(6)\n cert.gmtime_adj_notBefore(0)\n cert.gmtime_adj_notAfter(365*24*60*60)\n cert.set_issuer(cert.get_subject())\n cert.set_pubkey(k)\n cert.sign(k, 'sha1')\n\n open(join(cert_file), 'w').write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))\n open(join(key_file), \"w\").write(crypto.dump_privatekey(crypto.FILETYPE_PEM, k))\n return cert_file, key_file", "def main() :\n #fname = '/reg/d/psdm/CXI/cxi35711/hdf5/cxi35711-r0009.h5'\n #dsname = '/Configure:0000/Run:0000/CalibCycle:0000/CsPad::ElementV2/CxiDs1.0:Cspad.0/data'\n #event = 1\n\n fname = '/reg/d/psdm/CXI/cxi37411/hdf5/cxi37411-r0039.h5'\n dsname = '/Configure:0000/Run:0000/CalibCycle:0000/CsPad::ElementV2/CxiDsd.0:Cspad.0/data'\n event = 1\n\n print 'Default CSPad configuration pars:'\n cspadconfig.printCSPadConfigPars()\n\n print '\\nCSPad configuration pars: for fname, dsname, event =\\n', fname, '\\n', dsname, '\\n', event\n cspadconfig.setCSPadConfiguration( fname, dsname, event ) # This will set current CSPad configuration\n cspadconfig.printCSPadConfigPars()", "def generate(env):\n\n gcc.generate(env)\n\n # Set up standard folder locations\n env.SetDefault(SDK_TOOLS = env['TOOLS_ROOT'] + '/tools')\n env.SetDefault(KCC_DIR = env['SDK_TOOLS'] + '/kcc/bin')\n\n env['KCC'] = _detect(env)\n env['AS'] = '$KCC'\n env['CC'] = '$KCC'\n env['OBJSUFFIX'] = '.o'\n env['BUILDERS']['AsmObject'] = _kccAsmBuilder", "def gen_keys():", "def create_x509_cert(privkey, pubkey, subject_info, issuer_info, days):\n if not isinstance(subject_info, CertInfo):\n info = CertInfo()\n info.load_from_existing(subject_info)\n subject_info = info\n if not isinstance(issuer_info, CertInfo):\n info = CertInfo()\n info.load_from_existing(issuer_info)\n issuer_info = info\n\n dt_now = datetime.utcnow()\n dt_start = dt_now - timedelta(hours=1)\n dt_end = dt_now + timedelta(days=days)\n\n builder = (x509.CertificateBuilder()\n .subject_name(subject_info.get_name())\n .issuer_name(issuer_info.get_name())\n .not_valid_before(dt_start)\n .not_valid_after(dt_end)\n .serial_number(int(uuid.uuid4()))\n .public_key(pubkey))\n\n builder = subject_info.install_extensions(builder)\n\n # SubjectKeyIdentifier\n ext = x509.SubjectKeyIdentifier.from_public_key(pubkey)\n builder = builder.add_extension(ext, critical=False)\n\n # AuthorityKeyIdentifier\n ext = x509.AuthorityKeyIdentifier.from_issuer_public_key(privkey.public_key())\n builder = builder.add_extension(ext, critical=False)\n\n # IssuerAlternativeName\n if issuer_info.san:\n ext = x509.IssuerAlternativeName(issuer_info.get_san_gnames())\n builder = builder.add_extension(ext, critical=False)\n\n # final cert\n cert = builder.sign(private_key=privkey, algorithm=SHA256(), backend=get_backend())\n return cert", "def generate():", "def request_issuance(self, csr):\n action = LOG_ACME_REQUEST_CERTIFICATE()\n with action.context():\n return (\n DeferredContext(\n self._client.post(\n self.directory[csr], csr,\n content_type=DER_CONTENT_TYPE,\n headers=Headers({b'Accept': [DER_CONTENT_TYPE]})))\n .addCallback(self._expect_response, http.CREATED)\n .addCallback(self._parse_certificate)\n .addActionFinish())", "def main():\n try:\n return run_sysca(sys.argv[1:])\n except 
InvalidCertificate as ex:\n die(str(ex))", "def test_001_create_empty(self):\n ret = svcmgr.main(argv=[\"create\", \"-s\", SVCNAME])\n assert ret == 0", "def initial_setup():\n\n if os.path.exists(cfg.ca_private_key_path()):\n pkey = _try_load_ca_private_key(cfg.ca_private_key_path())\n else:\n pkey = _generate_ca_private_key(cfg.ca_private_key_path())\n\n if os.path.exists(cfg.ca_cert_path()):\n _try_load_ca_cert(cfg.ca_cert_path())\n else:\n _generate_ca_cert(cfg.ca_cert_path(), pkey)", "def _create_certificate_form(self, user, department, data=None, index=1, key=None):\n key = key or time.time()\n form = CertificateForm(user, department, data, prefix='gc{}'.format(key), index=index)\n return key, form", "def __init__(__self__,\n resource_name: str,\n args: CertificateArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def create_tsigkey(self, context, tsigkey):\n\n if tsigkey['algorithm'] not in TSIG_SUPPORTED_ALGORITHMS:\n raise exceptions.NotImplemented('Unsupported algorithm')\n\n tsigkey_m = models.TsigKey()\n\n tsigkey_m.update({\n 'designate_id': tsigkey['id'],\n 'name': tsigkey['name'],\n 'algorithm': tsigkey['algorithm'],\n 'secret': base64.b64encode(tsigkey['secret'])\n })\n\n tsigkey_m.save(self.session)\n\n # NOTE(kiall): Prepare and execute query to install this TSIG Key on\n # every domain. We use a manual query here since anything\n # else would be impossibly slow.\n query_select = select([\n models.Domain.__table__.c.id,\n \"'TSIG-ALLOW-AXFR'\",\n \"'%s'\" % tsigkey['name']]\n )\n\n columns = [\n models.DomainMetadata.__table__.c.domain_id,\n models.DomainMetadata.__table__.c.kind,\n models.DomainMetadata.__table__.c.content,\n ]\n\n query = InsertFromSelect(models.DomainMetadata.__table__, query_select,\n columns)\n\n # NOTE(kiall): A TX is required for, at the least, SQLite.\n self.session.begin()\n self.session.execute(query)\n self.session.commit()", "def regen_cert(self, student, course_id, course=None, forced_grade=None, template_file=None):\r\n # TODO: when del_cert is implemented and plumbed through certificates\r\n # repo also, do a deletion followed by a creation r/t a simple\r\n # recreation. XXX: this leaves orphan cert files laying around in\r\n # AWS. 
See note in the docstring too.\r\n try:\r\n certificate = GeneratedCertificate.objects.get(user=student, course_id=course_id)\r\n certificate.status = status.unavailable\r\n certificate.save()\r\n except GeneratedCertificate.DoesNotExist:\r\n pass\r\n\r\n return self.add_cert(student, course_id, course, forced_grade, template_file)", "def create_request(v1):\n #get entered data\n data = request.get_json()\n\n #picking the request attributes\n req_title = data.get(\"request_title\")\n req_desc = data.get(\"request_description\")\n requester_name = \"Gideon\"\n req_id = len(all_requests) +1 # + random.randint(1, 3000)\n\n #validation\n if not req_title:\n return jsonify({\"message\": \"Request has no title\"}), 400\n if not req_desc:\n return jsonify({\"message\": \"Request has no description\"}), 400\n if not requester_name:\n return jsonify({\"message\": \"Request must be issued by a user\"}), 400\n if not req_id:\n return jsonify({\"message\": \"Request has no id\"}), 400\n\n #storing entered request\n new_request = MaintenanceRequest(req_title, req_desc, requester_name, req_id)\n all_requests.append(new_request)\n # new_number_of_requests = len(all_requests)\n\n return jsonify({\n \"message\":\"sucessfully created request\",\n 'request_title':new_request.title,\n \"request_description\":new_request.description,\n \"requester_name\" : new_request.requester_name,\n \"request_id\" : new_request.request_id\n })", "def main():\n config = get_config(CONFIG_FILENAME)\n print(\"Creating IAM role\")\n role = create_iam_role(config)\n print(\"Creating redshift cluster\")\n create_redshift_cluster(config, role)", "def create_token(filename):\n\n try:\n os.makedirs(os.path.dirname(filename))\n except Exception:\n pass\n\n sk = ecdsa.SigningKey.generate(curve=ecdsa.NIST256p)\n vk = sk.verifying_key\n if vk is not None:\n line = encode_line(\"signing-key\", sk.to_der(), vk.to_der())\n\n with open(filename, \"w\") as f:\n f.write(line)", "def fusion_api_generate_certificate_signing_request(self, body, api=None, headers=None):\n return self.wsc.post(body, api=api, headers=headers)", "def CreateStarCert(filename, log = logging):\n temp1 = tempfile.mkstemp(prefix = 'ssl_proxy')\n temp2 = tempfile.mkstemp(prefix = 'ssl_proxy')\n\n cert_fields = { \"C\": \"US\", \"ST\": \"**INSECURE CONNECTION**\",\n \"L\": \"**INSECURE CONNECTION**\",\n \"O\": \"**INSECURE CONNECTION**\",\n \"OU\": \"**INSECURE CONNECTION**\",\n \"CN\": \"*\" }\n\n cert_valid_days = 1\n\n cert_string = '/C=%(C)s/ST=%(ST)s/L=%(L)s/O=%(O)s/OU=%(OU)s/CN=%(CN)s' % \\\n cert_fields\n\n openssl_command = 'openssl req -newkey rsa:1024 -keyout \"%s\" -nodes ' \\\n '-x509 -days 365 -out \"%s\" -subj \"%s\" -set_serial 0 -days %s ' \\\n '-batch' % (temp1[1], temp2[1], cert_string, cert_valid_days)\n\n find_openssl = os.system('which openssl > /dev/null')\n\n if not find_openssl == 0:\n log.error('Could not find openssl. 
(Used \"which openssl\" to search)')\n raise OSError, 'Command \"which openssl\" returned: %s' % find_openssl\n\n log.info('Running command: %s' % openssl_command)\n openssl_status = os.system(openssl_command)\n if not openssl_status == 0:\n raise OSError, 'Attempt to run openssl returned: %s' % openssl_status\n\n # Extract the keys into strings.\n key = os.read(temp1[0], 2048)\n cert = os.read(temp2[0], 2048)\n\n os.close(temp1[0])\n os.close(temp2[0])\n\n os.unlink(temp1[1])\n os.unlink(temp2[1])\n\n new_cert = open(filename, 'wb')\n new_cert.write('%s\\n%s' % (key, cert))\n\n new_cert.close()\n\n log.info('Successfully created %s' % filename)\n return True", "def generate_self_signed_certificate(self, key, cn, validity, san=None):\n _validity = min(validity, self.settings['max_validity_days'])\n subject = self.generate_x509_name(cn)\n issuer = subject\n # x509.CertificateBuilder functions return modified versions of the\n # object, so it's weirdly meant to be chained as function calls, making\n # this look weirdly javascript-like.\n cert = x509.CertificateBuilder(\n ).subject_name(\n subject,\n ).issuer_name(\n issuer,\n ).public_key(\n key.public_key(),\n ).serial_number(\n x509.random_serial_number(),\n ).not_valid_before(\n datetime.datetime.utcnow(),\n ).not_valid_after(\n datetime.datetime.utcnow() + datetime.timedelta(days=_validity),\n )\n if san:\n dns_names = self.encode_san_dns_names(san)\n cert = cert.add_extension(\n x509.SubjectAlternativeName(dns_names),\n critical=False,\n )\n return cert.sign(key, hashes.SHA256(), default_backend())", "def generate_ca(valid_attributes):\n attr_list = valid_attributes.split(',')\n nb_attributes = len(attr_list)\n\n gen_g1 = G1.generator()\n gen_g2 = G2.generator()\n exp = [G1.order().random() for _ in range(nb_attributes + 1)]\n\n pk = [gen_g1] + [gen_g1 ** i for i in exp[1:]] + [gen_g2] + [gen_g2 ** i for i in exp]\n sk = gen_g1 ** exp[0]\n\n sk = [sk, pk, attr_list]\n pk = [pk, attr_list]\n\n\n return (jsonpickle.encode(pk).encode(), jsonpickle.encode(sk).encode())", "def create(cls, keypair, exts = None, is_ca = False,\n caRepository = None, rpkiManifest = None, signedObject = None,\n cn = None, sn = None, eku = None, rpkiNotify = None):\n\n if cn is None:\n cn = \"\".join((\"%02X\" % ord(i) for i in keypair.get_SKI()))\n\n req = rpki.POW.PKCS10()\n req.setVersion(0)\n req.setSubject(X501DN.from_cn(cn, sn).get_POW())\n req.setPublicKey(keypair.get_POW())\n\n if is_ca:\n req.setBasicConstraints(True, None)\n req.setKeyUsage(cls.expected_ca_keyUsage)\n\n sia = (caRepository, rpkiManifest, signedObject, rpkiNotify)\n if not all(s is None for s in sia):\n req.setSIA(*tuple([str(s)] if isinstance(s, (str, unicode)) else s for s in sia))\n\n if eku:\n req.setEKU(eku)\n\n req.sign(keypair.get_POW(), rpki.POW.SHA256_DIGEST)\n return cls(POW = req)", "def generate(env):\n\n # FIXME: this is already too late\n #if env.get('quiet', False):\n # quietCommandLines(env)\n\n # shortcuts\n debug = env['debug']\n machine = env['machine']\n platform = env['platform']\n x86 = env['machine'] == 'x86'\n gcc = env['platform'] in ('linux', 'freebsd', 'darwin')\n msvc = env['platform'] in ('windows', 'winddk', 'wince')\n\n # Tool\n if platform == 'winddk':\n env.Tool('winddk')\n elif platform == 'wince':\n env.Tool('wcesdk')\n else:\n env.Tool('default')\n\n # Put build output in a separate dir, which depends on the current\n # configuration. 
See also http://www.scons.org/wiki/AdvancedBuildExample\n build_topdir = 'build'\n build_subdir = env['platform']\n if env['dri']:\n build_subdir += \"-dri\"\n if env['llvm']:\n build_subdir += \"-llvm\"\n if env['machine'] != 'generic':\n build_subdir += '-' + env['machine']\n if env['debug']:\n build_subdir += \"-debug\"\n if env['profile']:\n build_subdir += \"-profile\"\n build_dir = os.path.join(build_topdir, build_subdir)\n # Place the .sconsign file in the build dir too, to avoid issues with\n # different scons versions building the same source file\n env['build'] = build_dir\n env.SConsignFile(os.path.join(build_dir, '.sconsign'))\n\n # C preprocessor options\n cppdefines = []\n if debug:\n cppdefines += ['DEBUG']\n else:\n cppdefines += ['NDEBUG']\n if env['profile']:\n cppdefines += ['PROFILE']\n if platform == 'windows':\n cppdefines += [\n 'WIN32',\n '_WINDOWS',\n '_UNICODE',\n 'UNICODE',\n # http://msdn2.microsoft.com/en-us/library/6dwk3a1z.aspx,\n 'WIN32_LEAN_AND_MEAN',\n 'VC_EXTRALEAN',\n '_CRT_SECURE_NO_DEPRECATE',\n ]\n if debug:\n cppdefines += ['_DEBUG']\n if platform == 'winddk':\n # Mimic WINDDK's builtin flags. See also:\n # - WINDDK's bin/makefile.new i386mk.inc for more info.\n # - buildchk_wxp_x86.log files, generated by the WINDDK's build\n # - http://alter.org.ua/docs/nt_kernel/vc8_proj/\n cppdefines += [\n ('_X86_', '1'),\n ('i386', '1'),\n 'STD_CALL',\n ('CONDITION_HANDLING', '1'),\n ('NT_INST', '0'),\n ('WIN32', '100'),\n ('_NT1X_', '100'),\n ('WINNT', '1'),\n ('_WIN32_WINNT', '0x0501'), # minimum required OS version\n ('WINVER', '0x0501'),\n ('_WIN32_IE', '0x0603'),\n ('WIN32_LEAN_AND_MEAN', '1'),\n ('DEVL', '1'),\n ('__BUILDMACHINE__', 'WinDDK'),\n ('FPO', '0'),\n ]\n if debug:\n cppdefines += [('DBG', 1)]\n if platform == 'wince':\n cppdefines += [\n '_CRT_SECURE_NO_DEPRECATE',\n '_USE_32BIT_TIME_T',\n 'UNICODE',\n '_UNICODE',\n ('UNDER_CE', '600'),\n ('_WIN32_WCE', '0x600'),\n 'WINCEOEM',\n 'WINCEINTERNAL',\n 'WIN32',\n 'STRICT',\n 'x86',\n '_X86_',\n 'INTERNATIONAL',\n ('INTLMSG_CODEPAGE', '1252'),\n ]\n if platform == 'windows':\n cppdefines += ['PIPE_SUBSYSTEM_WINDOWS_USER']\n if platform == 'winddk':\n cppdefines += ['PIPE_SUBSYSTEM_WINDOWS_DISPLAY']\n if platform == 'wince':\n cppdefines += ['PIPE_SUBSYSTEM_WINDOWS_CE']\n env.Append(CPPDEFINES = cppdefines)\n\n # C preprocessor includes\n if platform == 'winddk':\n env.Append(CPPPATH = [\n env['SDK_INC_PATH'],\n env['DDK_INC_PATH'],\n env['WDM_INC_PATH'],\n env['CRT_INC_PATH'],\n ])\n\n # C compiler options\n cflags = []\n if gcc:\n if debug:\n cflags += ['-O0', '-g3']\n else:\n cflags += ['-O3', '-g3']\n if env['profile']:\n cflags += ['-pg']\n if env['machine'] == 'x86':\n cflags += [\n '-m32',\n #'-march=pentium4',\n '-mmmx', '-msse', '-msse2', # enable SIMD intrinsics\n #'-mfpmath=sse',\n ]\n if env['machine'] == 'x86_64':\n cflags += ['-m64']\n cflags += [\n '-Wall',\n '-Wmissing-prototypes',\n '-Wno-long-long',\n '-ffast-math',\n '-pedantic',\n '-fmessage-length=0', # be nice to Eclipse\n ]\n if msvc:\n # See also:\n # - http://msdn.microsoft.com/en-us/library/19z1t1wy.aspx\n # - cl /?\n if debug:\n cflags += [\n '/Od', # disable optimizations\n '/Oi', # enable intrinsic functions\n '/Oy-', # disable frame pointer omission\n ]\n else:\n cflags += [\n '/Ox', # maximum optimizations\n '/Oi', # enable intrinsic functions\n '/Ot', # favor code speed\n #'/fp:fast', # fast floating point \n ]\n if env['profile']:\n cflags += [\n '/Gh', # enable _penter hook function\n '/GH', # enable _pexit hook 
function\n ]\n cflags += [\n '/W3', # warning level\n #'/Wp64', # enable 64 bit porting warnings\n ]\n if env['machine'] == 'x86':\n cflags += [\n #'/QIfist', # Suppress _ftol\n #'/arch:SSE2', # use the SSE2 instructions\n ]\n if platform == 'windows':\n cflags += [\n # TODO\n ]\n if platform == 'winddk':\n cflags += [\n '/Zl', # omit default library name in .OBJ\n '/Zp8', # 8bytes struct member alignment\n '/Gy', # separate functions for linker\n '/Gm-', # disable minimal rebuild\n '/WX', # treat warnings as errors\n '/Gz', # __stdcall Calling convention\n '/GX-', # disable C++ EH\n '/GR-', # disable C++ RTTI\n '/GF', # enable read-only string pooling\n '/G6', # optimize for PPro, P-II, P-III\n '/Ze', # enable extensions\n '/Gi-', # disable incremental compilation\n '/QIfdiv-', # disable Pentium FDIV fix\n '/hotpatch', # prepares an image for hotpatching.\n #'/Z7', #enable old-style debug info\n ]\n if platform == 'wince':\n # See also C:\\WINCE600\\public\\common\\oak\\misc\\makefile.def\n cflags += [\n '/Zl', # omit default library name in .OBJ\n '/GF', # enable read-only string pooling\n '/GR-', # disable C++ RTTI\n '/GS', # enable security checks\n # Allow disabling language conformance to maintain backward compat\n #'/Zc:wchar_t-', # don't force wchar_t as native type, instead of typedef\n #'/Zc:forScope-', # don't enforce Standard C++ for scoping rules\n #'/wd4867',\n #'/wd4430',\n #'/MT',\n #'/U_MT',\n ]\n # Automatic pdb generation\n # See http://scons.tigris.org/issues/show_bug.cgi?id=1656\n env.EnsureSConsVersion(0, 98, 0)\n env['PDB'] = '${TARGET.base}.pdb'\n env.Append(CFLAGS = cflags)\n env.Append(CXXFLAGS = cflags)\n\n # Assembler options\n if gcc:\n if env['machine'] == 'x86':\n env.Append(ASFLAGS = ['-m32'])\n if env['machine'] == 'x86_64':\n env.Append(ASFLAGS = ['-m64'])\n\n # Linker options\n linkflags = []\n if gcc:\n if env['machine'] == 'x86':\n linkflags += ['-m32']\n if env['machine'] == 'x86_64':\n linkflags += ['-m64']\n if platform == 'winddk':\n # See also:\n # - http://msdn2.microsoft.com/en-us/library/y0zzbyt4.aspx\n linkflags += [\n '/merge:_PAGE=PAGE',\n '/merge:_TEXT=.text',\n '/section:INIT,d',\n '/opt:ref',\n '/opt:icf',\n '/ignore:4198,4010,4037,4039,4065,4070,4078,4087,4089,4221',\n '/incremental:no',\n '/fullbuild',\n '/release',\n '/nodefaultlib',\n '/wx',\n '/debug',\n '/debugtype:cv',\n '/version:5.1',\n '/osversion:5.1',\n '/functionpadmin:5',\n '/safeseh',\n '/pdbcompress',\n '/stack:0x40000,0x1000',\n '/driver',\n '/align:0x80',\n '/subsystem:native,5.01',\n '/base:0x10000',\n\n '/entry:DrvEnableDriver',\n ]\n if env['profile']:\n linkflags += [\n '/MAP', # http://msdn.microsoft.com/en-us/library/k7xkk3e2.aspx\n ]\n if platform == 'wince':\n linkflags += [\n '/nodefaultlib',\n #'/incremental:no',\n #'/fullbuild',\n '/entry:_DllMainCRTStartup',\n ]\n env.Append(LINKFLAGS = linkflags)\n\n # Default libs\n env.Append(LIBS = [])\n\n # Custom builders and methods\n createConvenienceLibBuilder(env)\n createCodeGenerateMethod(env)\n createInstallMethods(env)\n\n # for debugging\n #print env.Dump()", "def assemble(self, *args, **kwargs):\n assert self.form is not None\n logger.info(\"Assembling '{}'.\".format(self.form.__name__))\n mat = COOData._assemble_scipy_csr(*self._assemble(*args, **kwargs))\n logger.info(\"Assembling finished.\")\n return mat", "def _new_opensslconf(self):\n# print \"new_opensslconf\"\n _log.debug(\"__init__::new_opensslconf\")\n for section in self.__class__.DEFAULT.keys():\n self.config.add_section(section)\n# print 
\"[{}]\".format(section)\n hostname = socket.gethostname()\n for option in self.__class__.DEFAULT[section]:\n if option == \"0.organizationName\":\n value = self.domain\n #TODO: use dynamic number of DNS entries instead of hardcoding the number\n elif option == \"DNS.1\":\n value = self.node_name\n elif (option == \"DNS.2\") and len(self.hostnames)>0:\n value = self.hostnames[0]\n elif (option == \"DNS.3\") and len(self.hostnames)>1:\n value = self.hostnames[1]\n elif (option == \"DNS.4\") and len(self.hostnames)>2:\n value = self.hostnames[2]\n elif option == \"IP.1\":\n value = self.ip\n elif option == \"dir\":\n value = self.runtime_dir\n elif section == 'req_distinguished_name' and option == 'commonName':\n value = self.node_name\n elif option == 'dnQualifier':\n value = self.node_id\n #The python cryptography and the pyOpensSSL packages does not support\n #parsing the Attributes extension in a CSR, so instead it is stored\n #outside of the CSR\n# elif option == 'challengePassword':\n# value = self.enrollment_password\n else:\n value = self.__class__.DEFAULT[section][option]\n# print \"\\t{}={}\".format(option, value)\n self.config.set(section, option, value)\n with open(self.configfile, 'wb') as configfd:\n self.config.write(configfd)\n configfd.close()\n confsort.reorder(self.configfile)", "def opensslCmsCertCreate( ownerCertFile ):\n opensslCmdArgs = [ \"openssl\", \"crl2pkcs7\", \"-certfile\", ownerCertFile,\n \"-nocrl\", \"-outform\", \"der\" ]\n ownerCertCmsDerBase64 = runOpensslCmd( opensslCmdArgs, [ \"base64\" ] )\n return ownerCertCmsDerBase64", "def keygen():\n pk, pub = generate_signing_key()\n t = PrettyTable([\"Private (install on your witness node)\",\n \"Public (publish with 'conductor enable' command)\"])\n t.align = \"l\"\n t.add_row([pk, pub])\n\n output(t, '')", "def handler(e, c): # handler(event, context, /)\n\n get_remaining_time_in_millis = c.get_remaining_time_in_millis\n\n log_info(e)\n\n def request_cert():\n \"\"\"\n Create a certificate\n\n This create an ACM certificate and update the event payload with the PhysicalResourceId.\n The certificate will not yet be issued.\n\n \"\"\"\n\n api_request = shallow_copy(props)\n\n for key in ['ServiceToken', 'Region', 'Tags', 'Route53RoleArn']:\n api_request.pop(key, None)\n\n if 'ValidationMethod' in props:\n if props['ValidationMethod'] == 'DNS':\n\n # Check that we have all the hosted zone information we need to validate\n # before we create the certificate\n for name in set([props['DomainName']] + props.get('SubjectAlternativeNames', [])):\n get_zone_for(name)\n\n del api_request['DomainValidationOptions']\n\n e['PhysicalResourceId'] = acm.request_certificate(\n IdempotencyToken=i_token,\n **api_request\n )['CertificateArn']\n add_tags()\n\n def delete_certificate(a): # delete_certificate(arn, /)\n \"\"\"\n Delete a certificate\n\n Attempts to delete a certificate.\n\n :param str a: Arn of the certificate to delete\n\n \"\"\"\n\n while True:\n\n try:\n acm.delete_certificate(**{'CertificateArn': a})\n return\n except ClientError as exception:\n log_exception('')\n\n err_code = exception.response['Error']['Code']\n\n if err_code == 'ResourceInUseException':\n if get_remaining_time_in_millis() / 1000 < 30:\n raise\n\n sleep(5)\n continue\n\n if err_code in ['ResourceNotFoundException', 'ValidationException']:\n # If the arn is invalid, it didn't exist anyway.\n return\n\n raise\n\n except ParamValidationError:\n # invalid arn\n return\n\n def find_certificate(p): # find_certificate(props, /)\n \"\"\"\n Find a 
certificate that belongs to this stack\n\n If the certificate is not found, returns None.\n\n :param dict p: The properties of the certificate to find\n :returns: The arn of the certificate\n :rtype: str or None\n\n \"\"\"\n\n for page in acm.get_paginator('list_certificates').paginate():\n for certificate in page['CertificateSummaryList']:\n log_info(certificate)\n\n if p['DomainName'].lower() == certificate['DomainName']:\n tags = {tag['Key']: tag['Value'] for tag in\n acm.list_tags_for_certificate(**{'CertificateArn': certificate['CertificateArn']})['Tags']}\n\n if (tags.get('cloudformation:' + 'logical-id') == e['LogicalResourceId'] and\n tags.get('cloudformation:' + 'stack-id') == e['StackId'] and\n tags.get('cloudformation:' + 'properties') == hash_func(p)\n ):\n return certificate['CertificateArn']\n\n def reinvoke():\n \"\"\"\n Reinvoke this lambda\n\n The time to issue a certificate may be more than the lambda can execute for.\n This reinvokes this lambda to continue waiting.\n\n If this lambda has itself been reinvoked, instead raise a RuntimeError.\n\n \"\"\"\n\n # Only Reinvoke once, which is a total of 30 minutes running\n if REINVOKED in e:\n raise RuntimeError('Certificate not issued in time')\n\n e[REINVOKED] = REINVOKED\n\n log_info(e)\n client('lambda').invoke(\n FunctionName=c.invoked_function_arn,\n InvocationType='Event',\n Payload=json_dumps(e)\n )\n\n def wait_for_issuance():\n \"\"\"\n Wait until a certificate is issued\n\n Returns True when issued, False when lambda execution time is up.\n If the certificate fails to issue, a RuntimeError is raised\n\n :rtype: bool\n\n \"\"\"\n\n while (get_remaining_time_in_millis() / 1000) > 30:\n\n cert = acm.describe_certificate(**{'CertificateArn': e['PhysicalResourceId']})['Certificate']\n log_info(cert)\n\n if cert['Status'] == 'ISSUED':\n return True\n elif cert['Status'] == 'FAILED':\n raise RuntimeError(cert.get('FailureReason', ''))\n\n sleep(5)\n\n return False\n\n def replace_cert():\n \"\"\"\n Does the update require replacement of the certificate?\n\n Only tags can be updated without replacement\n\n :rtype: bool\n\n \"\"\"\n\n old = shallow_copy(e['Old' + 'ResourceProperties'])\n old.pop('Tags', None)\n\n new = shallow_copy(e['ResourceProperties'])\n new.pop('Tags', None)\n\n return old != new\n\n def validate():\n \"\"\"\n Add DNS validation records for a certificate\n\n \"\"\"\n\n if props.get('ValidationMethod') != 'DNS':\n return\n\n while True:\n cert = acm.describe_certificate(**{'CertificateArn': e['PhysicalResourceId']})['Certificate']\n log_info(cert)\n\n if cert['Status'] != 'PENDING_VALIDATION':\n return\n\n if not [\n validation_option\n for validation_option in cert.get('DomainValidationOptions', [{}])\n if 'ValidationStatus' not in validation_option\n or 'ResourceRecord' not in validation_option\n ]:\n # All validation options have a status and resource record to create\n break\n\n sleep(1)\n\n for validation_option in cert['DomainValidationOptions']:\n\n if validation_option['ValidationStatus'] == 'PENDING_VALIDATION':\n hosted_zone = get_zone_for(validation_option['DomainName'])\n\n role_arn = hosted_zone.get('Route53RoleArn', props.get('Route53RoleArn'))\n\n sts = client('sts').assume_role(\n RoleArn=role_arn,\n RoleSessionName=('Certificate' + e['LogicalResourceId'])[:64],\n DurationSeconds=900,\n )['Credentials'] if role_arn is not None else {}\n\n route53 = client('route53',\n aws_access_key_id=sts.get('AccessKeyId'),\n aws_secret_access_key=sts.get('SecretAccessKey'),\n 
aws_session_token=sts.get('SessionToken'),\n ).change_resource_record_sets(**{\n 'HostedZoneId': hosted_zone['HostedZoneId'],\n 'ChangeBatch': {\n 'Comment': 'Domain validation for ' + e['PhysicalResourceId'],\n 'Changes': [{\n 'Action': 'UPSERT',\n 'ResourceRecordSet': {\n 'Name': validation_option['ResourceRecord']['Name'],\n 'Type': validation_option['ResourceRecord']['Type'],\n 'TTL': 60,\n 'ResourceRecords': [{'Value': validation_option['ResourceRecord']['Value']}],\n },\n }],\n }},\n )\n\n log_info(route53)\n\n def get_zone_for(n): # get_zone_for(name, /)\n \"\"\"\n Return the hosted zone to use for validating a name\n\n :param str n: The name to validate\n :rtype: dict\n\n \"\"\"\n\n n = n.rstrip('.')\n zones = {domain['DomainName'].rstrip('.'): domain for domain in props['DomainValidationOptions']}\n\n parts = n.split('.')\n\n while len(parts):\n if '.'.join(parts) in zones:\n return zones['.'.join(parts)]\n\n parts = parts[1:]\n\n raise RuntimeError('DomainValidationOptions' + ' missing for ' + n)\n\n hash_func = lambda v: hashlib.new('md5', json_dumps(v)).hexdigest()\n\n def add_tags():\n \"\"\"\n Add tags from the ResourceProperties to the Certificate\n\n Also adds logical-id, stack-id, stack-name and properties tags, which are used by the custom resource.\n\n \"\"\"\n\n tags = shallow_copy(e['ResourceProperties'].get('Tags', []))\n tags += [\n {'Key': 'cloudformation:' + 'logical-id', 'Value': e['LogicalResourceId']},\n {'Key': 'cloudformation:' + 'stack-id', 'Value': e['StackId']},\n {'Key': 'cloudformation:' + 'stack-name', 'Value': e['StackId'].split('/')[1]},\n {'Key': 'cloudformation:' + 'properties', 'Value': hash_func(e['ResourceProperties'])}\n ]\n\n acm.add_tags_to_certificate(**{'CertificateArn': e['PhysicalResourceId'], 'Tags': tags})\n\n def send_response():\n \"\"\"\n Send a response to cloudformation\n\n \"\"\"\n\n log_info(e)\n\n response = urlopen(Request(e['ResponseURL'], json_dumps(e), {'content-type': ''}, method='PUT'))\n\n if response.status != 200:\n raise Exception(response)\n\n try:\n i_token = hash_func(e['RequestId'] + e['StackId'])\n props = e['ResourceProperties']\n\n acm = client('acm', region_name=props.get('Region'))\n\n e['Status'] = 'SUCCESS'\n\n if e['RequestType'] == 'Create':\n\n if REINVOKED not in e:\n e['PhysicalResourceId'] = 'None'\n request_cert()\n\n validate()\n\n if not wait_for_issuance():\n return reinvoke()\n\n elif e['RequestType'] == 'Delete':\n\n if e['PhysicalResourceId'] != 'None':\n if e['PhysicalResourceId'].startswith('arn:'):\n delete_certificate(e['PhysicalResourceId'])\n else:\n delete_certificate(find_certificate(props))\n\n elif e['RequestType'] == 'Update':\n\n if replace_cert():\n log_info('Update')\n\n if find_certificate(props) == e['PhysicalResourceId']:\n # This is an update cancel request.\n\n # Try and delete the new certificate that is no longer required\n try:\n acm = client('acm', region_name=e['OldResourceProperties'].get('Region'))\n log_info('Delete')\n delete_certificate(find_certificate(e['OldResourceProperties']))\n except:\n log_exception('')\n\n # return success for the update - nothing changed\n return send_response()\n\n if REINVOKED not in e:\n request_cert()\n\n validate()\n\n if not wait_for_issuance():\n return reinvoke()\n else:\n if 'Tags' in e['Old' + 'ResourceProperties']:\n acm.remove_tags_from_certificate(**{\n 'CertificateArn': e['PhysicalResourceId'],\n 'Tags': e['Old' + 'ResourceProperties']['Tags']\n })\n\n add_tags()\n\n else:\n raise RuntimeError(e['RequestType'])\n\n return 
send_response()\n\n except Exception as ex:\n log_exception('')\n e['Status'] = 'FAILED'\n e['Reason'] = str(ex)\n return send_response()", "def issue_certificate(self, csr, validity):\n csr = csr.encode(encoding='UTF-8')\n with stats.timer('issue_certificate'):\n client = confidant.clients.get_boto_client('acm-pca')\n response = client.issue_certificate(\n CertificateAuthorityArn=self.settings['arn'],\n Csr=csr,\n SigningAlgorithm=self.settings['signing_algorithm'],\n Validity={\n 'Value': min(validity, self.settings['max_validity_days']),\n 'Type': 'DAYS',\n },\n # Quick/easy idempotent token is just a hash of the csr itself.\n # The token must be 36 chars or less.\n IdempotencyToken=hashlib.sha256(csr).hexdigest()[:36],\n )\n return response['CertificateArn']", "def create(ctx):\n pass", "def __init__(__self__,\n resource_name: str,\n args: ServerCertificateArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def create_keys(self):\n crypto_tool = CryptoTools()\n # creating RSA keys for the signer user\n public_key, private_key = crypto_tool.create_key_with_entropy()\n self.priv_key = crypto_tool.get_pem_format(private_key).decode(\"utf-8\")\n self.pub_key = crypto_tool.get_pem_format(public_key).decode(\"utf-8\")" ]
[ "0.6959776", "0.68491095", "0.6717614", "0.6480697", "0.646453", "0.6371925", "0.6347647", "0.6325621", "0.6219762", "0.6209572", "0.61821324", "0.6039916", "0.58269167", "0.58240783", "0.5717246", "0.56873", "0.5679572", "0.5627929", "0.5522485", "0.5479461", "0.5473484", "0.54261416", "0.53729886", "0.53589314", "0.53462595", "0.53275394", "0.5291644", "0.52909124", "0.52785635", "0.5260957", "0.52578354", "0.52315027", "0.5207552", "0.51955825", "0.5179027", "0.51705176", "0.5126884", "0.5110029", "0.5109532", "0.51081717", "0.508553", "0.50812286", "0.508102", "0.50639725", "0.50592417", "0.50493234", "0.50370085", "0.50101894", "0.5007057", "0.49977177", "0.49846148", "0.49775052", "0.49658805", "0.4965129", "0.49646953", "0.4964681", "0.4964681", "0.49629825", "0.49581614", "0.4955993", "0.4955993", "0.4945727", "0.49456823", "0.49332675", "0.4918617", "0.49160293", "0.49092698", "0.49033362", "0.49032968", "0.48899543", "0.4879918", "0.48592752", "0.48555198", "0.48530117", "0.48228872", "0.48221636", "0.4810472", "0.4805697", "0.47999117", "0.47982484", "0.47955135", "0.47921285", "0.4783799", "0.47814193", "0.47811642", "0.47795266", "0.4770531", "0.47678882", "0.4767591", "0.47669345", "0.4766226", "0.47606415", "0.47545433", "0.47476727", "0.47470284", "0.4744276", "0.47439963", "0.4727496", "0.47143614", "0.4713934" ]
0.54836375
19
Main cert creation code.
def create_x509_cert(privkey, pubkey, subject_info, issuer_info, days): if not isinstance(subject_info, CertInfo): info = CertInfo() info.load_from_existing(subject_info) subject_info = info if not isinstance(issuer_info, CertInfo): info = CertInfo() info.load_from_existing(issuer_info) issuer_info = info dt_now = datetime.utcnow() dt_start = dt_now - timedelta(hours=1) dt_end = dt_now + timedelta(days=days) builder = (x509.CertificateBuilder() .subject_name(subject_info.get_name()) .issuer_name(issuer_info.get_name()) .not_valid_before(dt_start) .not_valid_after(dt_end) .serial_number(int(uuid.uuid4())) .public_key(pubkey)) builder = subject_info.install_extensions(builder) # SubjectKeyIdentifier ext = x509.SubjectKeyIdentifier.from_public_key(pubkey) builder = builder.add_extension(ext, critical=False) # AuthorityKeyIdentifier ext = x509.AuthorityKeyIdentifier.from_issuer_public_key(privkey.public_key()) builder = builder.add_extension(ext, critical=False) # IssuerAlternativeName if issuer_info.san: ext = x509.IssuerAlternativeName(issuer_info.get_san_gnames()) builder = builder.add_extension(ext, critical=False) # final cert cert = builder.sign(private_key=privkey, algorithm=SHA256(), backend=get_backend()) return cert
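The document above is not self-contained: it assumes the `cryptography` package (`x509`, `hashes.SHA256`) plus the originating library's own `CertInfo` and `get_backend` helpers. As a minimal, hedged illustration of the same `x509.CertificateBuilder` chaining pattern, the sketch below self-signs a throwaway certificate; the key size, validity window, and `example.test` name are arbitrary choices for the example, not part of the original snippet.

from datetime import datetime, timedelta
import uuid

from cryptography import x509
from cryptography.x509.oid import NameOID
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa

# Throwaway RSA key; a real caller would pass privkey/pubkey in,
# as create_x509_cert above does.
key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u"example.test")])

cert = (
    x509.CertificateBuilder()
    .subject_name(name)
    .issuer_name(name)  # self-signed: issuer == subject
    .public_key(key.public_key())
    .serial_number(int(uuid.uuid4()))  # same serial scheme as the snippet
    .not_valid_before(datetime.utcnow() - timedelta(hours=1))
    .not_valid_after(datetime.utcnow() + timedelta(days=365))
    .sign(private_key=key, algorithm=hashes.SHA256())
)
print(cert.subject.rfc4514_string())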
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
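The metadata's triplet objective pairs each query with one positive document and the negatives that follow. Below is a hypothetical sketch of how such a record might be scored in contrastive training, assuming pre-computed, L2-normalized embeddings; the loss formulation and temperature are illustrative assumptions, since the record itself does not specify a training recipe.

import torch
import torch.nn.functional as F

def triplet_infonce(q_emb, doc_emb, neg_embs, temperature=0.05):
    """q_emb: (d,), doc_emb: (d,), neg_embs: (n, d); all L2-normalized."""
    # Candidate pool: positive document at index 0, then the negatives.
    candidates = torch.cat([doc_emb.unsqueeze(0), neg_embs], dim=0)  # (n+1, d)
    logits = candidates @ q_emb / temperature                        # (n+1,)
    # Cross-entropy against index 0 pushes the query toward its document.
    return F.cross_entropy(logits.unsqueeze(0), torch.tensor([0]))

# Toy usage with random embeddings standing in for a real encoder.
d, n = 32, 4
q = F.normalize(torch.randn(d), dim=0)
doc = F.normalize(torch.randn(d), dim=0)
negs = F.normalize(torch.randn(n, d), dim=1)
print(triplet_infonce(q, doc, negs).item())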
[ "def new_cert(self, commonname, extensions=None):\n\n serial = self._get_serial()\n pkey = self._create_pkey(commonname, serial)\n self._create_cert(pkey, commonname, serial, extensions)", "def _init_keys(self):\n\n basic_constraints = crypto.X509Extension('basicConstraints'.encode('ascii'), True,\n 'CA:TRUE, pathlen:0'.encode('ascii'))\n serial = self._get_serial()\n pkey = self._create_pkey(self.commonname, serial)\n self._create_cert(pkey, self.commonname, serial, [basic_constraints], expire=30*365)", "def create_pki():\n os.mkdir(pki_dir)\n os.mkdir(f'{pki_dir}/newcerts')\n Path(f'{pki_dir}/index.txt').touch()\n with open(f'{pki_dir}/serial', 'w') as serial_file:\n serial_file.write('00000000')\n serial_file.close()\n create_CA('/CN=My cool CA/O=Honest Achmed/OU=Used Cars/C=EU')", "def _create_certificate_chain():\n caext = X509Extension(b\"basicConstraints\", False, b\"CA:true\")\n not_after_date = datetime.date.today() + datetime.timedelta(days=365)\n not_after = not_after_date.strftime(\"%Y%m%d%H%M%SZ\").encode(\"ascii\")\n\n # Step 1\n cakey = PKey()\n cakey.generate_key(TYPE_RSA, 2048)\n cacert = X509()\n cacert.set_version(2)\n cacert.get_subject().commonName = \"Authority Certificate\"\n cacert.set_issuer(cacert.get_subject())\n cacert.set_pubkey(cakey)\n cacert.set_notBefore(b\"20000101000000Z\")\n cacert.set_notAfter(not_after)\n cacert.add_extensions([caext])\n cacert.set_serial_number(0)\n cacert.sign(cakey, \"sha256\")\n\n # Step 2\n ikey = PKey()\n ikey.generate_key(TYPE_RSA, 2048)\n icert = X509()\n icert.set_version(2)\n icert.get_subject().commonName = \"Intermediate Certificate\"\n icert.set_issuer(cacert.get_subject())\n icert.set_pubkey(ikey)\n icert.set_notBefore(b\"20000101000000Z\")\n icert.set_notAfter(not_after)\n icert.add_extensions([caext])\n icert.set_serial_number(0)\n icert.sign(cakey, \"sha256\")\n\n # Step 3\n skey = PKey()\n skey.generate_key(TYPE_RSA, 2048)\n scert = X509()\n scert.set_version(2)\n scert.get_subject().commonName = \"Server Certificate\"\n scert.set_issuer(icert.get_subject())\n scert.set_pubkey(skey)\n scert.set_notBefore(b\"20000101000000Z\")\n scert.set_notAfter(not_after)\n scert.add_extensions(\n [X509Extension(b\"basicConstraints\", True, b\"CA:false\")]\n )\n scert.set_serial_number(0)\n scert.sign(ikey, \"sha256\")\n\n return [(cakey, cacert), (ikey, icert), (skey, scert)]", "def create_certs(application_name, ip, issuer_name, signing_key):\n logging.info(\"Creating cert for {}\".format(application_name))\n # The IP is used as the CN for backward compatability and as an\n # alternative_name for forward comapability.\n (key, cert) = zaza.openstack.utilities.cert.generate_cert(\n ip,\n issuer_name=ISSUER_NAME,\n alternative_names=[ip],\n signing_key=signing_key)\n APP_CERT_DIR = os.path.join(CERT_DIR, application_name)\n if not os.path.exists(APP_CERT_DIR):\n os.makedirs(APP_CERT_DIR)\n write_cert(APP_CERT_DIR, 'cert.pem', cert)\n write_cert(APP_CERT_DIR, 'cert.key', key)", "def create_cert(self, cert_file, key_file):\n if os.path.isfile(cert_file) and os.path.isfile(key_file):\n return cert_file, key_file\n\n k = crypto.PKey()\n k.generate_key(crypto.TYPE_RSA, 2048)\n cert = crypto.X509()\n cert.get_subject().C = \"US\"\n cert.get_subject().ST = \"CO\"\n cert.get_subject().L = \"Denver\"\n cert.get_subject().CN = gethostname()\n cert.get_subject().O = \"Metropolitan State University of Denver\"\n cert.get_subject().OU = \"Computer Science\"\n cert.set_serial_number(6)\n cert.gmtime_adj_notBefore(0)\n 
cert.gmtime_adj_notAfter(365*24*60*60)\n cert.set_issuer(cert.get_subject())\n cert.set_pubkey(k)\n cert.sign(k, 'sha1')\n\n open(join(cert_file), 'w').write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))\n open(join(key_file), \"w\").write(crypto.dump_privatekey(crypto.FILETYPE_PEM, k))\n return cert_file, key_file", "def create_ssl_cert_request ( ssl_hostnames ) :\n first_hostname = ssl_hostnames[ 0 ]\n csr_filename = get_ssl_csr_filename( first_hostname )\n key_filename = get_ssl_key_filename( first_hostname )\n openssl_cnf = \"\"\"\n[req]\ndistinguished_name = req_distinguished_name\nreq_extensions = san_ext\n\n[req_distinguished_name]\ncountryName_default = US\nstateOrProvinceName_default = New York\nlocalityName_default = New York\norganizationalUnitName_default = Home Box Office, Inc\ncommonName_default = \"\"\" + first_hostname + \"\"\"\n\n[san_ext]\nbasicConstraints = CA:FALSE\nkeyUsage = nonRepudiation, digitalSignature, keyEncipherment\nsubjectAltName = @sans\n\n[sans]\n\"\"\"\n counter = 0\n for hostname in ssl_hostnames :\n counter += 1\n openssl_cnf += 'DNS.' + str( counter ) + ' = ' + hostname + '\\n'\n\n with open( first_hostname, 'w' ) as f :\n f.write( openssl_cnf )\n cmd = 'openssl req -new -newkey rsa:2048 -nodes -out ' + csr_filename + ' -keyout ' + key_filename\n cmd += ' -config ' + first_hostname + ' -subj \"/C=US/ST=New York/L=New York/O=Home Box Office Inc/CN=' + first_hostname + '\"'\n keygen = subprocess.call( cmd, shell = True )\n os.remove( first_hostname )\n if keygen != 0 :\n print \"Generation of SSL request failed!\"\n return None\n\n return { 'csr-filename' : csr_filename, 'key-filename' : key_filename }", "def create_cert(commonname, ca_dir):\n sca = SimpleCA(ca_dir)\n sca.new_cert(commonname)", "def create_selfsigned_certificates(name):\n pass", "def create_server_certs():\n global server_key_files, server_keystore, config\n\n same_enc_sign_cert = config[\"config\"][\"same_enc_sign_cert\"]\n if not Path(server_key_files[\"key\"]).is_file() or not Path(server_key_files[\"crt\"]).is_file():\n print(\"create new encryption cert\\n\")\n create_server_certs_enc()\n for f_item in [\"key\", \"crt\"]:\n with open(server_key_files[f_item], \"w\") as f:\n f.write(server_keystore[f_item])\n f.close()\n else:\n for f_item in [\"key\", \"crt\"]:\n with open(server_key_files[f_item], \"r\") as f:\n server_keystore[f_item] = f.read()\n f.close()\n\n server_keystore[\"key-sign\"] = server_keystore[\"key\"]\n server_keystore[\"crt-sign\"] = server_keystore[\"crt\"]\n\n if not Path(server_key_files[\"key-sign\"]).is_file() or not Path(server_key_files[\"crt-sign\"]).is_file():\n print(\"create new signing cert\\n\")\n if not same_enc_sign_cert:\n create_server_certs_sign()\n for f_item in [\"key-sign\", \"crt-sign\"]:\n with open(server_key_files[f_item], \"w\") as f:\n f.write(server_keystore[f_item])\n f.close()\n else:\n for f_item in [\"key-sign\", \"crt-sign\"]:\n with open(server_key_files[f_item], \"r\") as f:\n server_keystore[f_item] = f.read()\n f.close()", "def CreateStarCert(filename, log = logging):\n temp1 = tempfile.mkstemp(prefix = 'ssl_proxy')\n temp2 = tempfile.mkstemp(prefix = 'ssl_proxy')\n\n cert_fields = { \"C\": \"US\", \"ST\": \"**INSECURE CONNECTION**\",\n \"L\": \"**INSECURE CONNECTION**\",\n \"O\": \"**INSECURE CONNECTION**\",\n \"OU\": \"**INSECURE CONNECTION**\",\n \"CN\": \"*\" }\n\n cert_valid_days = 1\n\n cert_string = '/C=%(C)s/ST=%(ST)s/L=%(L)s/O=%(O)s/OU=%(OU)s/CN=%(CN)s' % \\\n cert_fields\n\n openssl_command = 'openssl req 
-newkey rsa:1024 -keyout \"%s\" -nodes ' \\\n '-x509 -days 365 -out \"%s\" -subj \"%s\" -set_serial 0 -days %s ' \\\n '-batch' % (temp1[1], temp2[1], cert_string, cert_valid_days)\n\n find_openssl = os.system('which openssl > /dev/null')\n\n if not find_openssl == 0:\n log.error('Could not find openssl. (Used \"which openssl\" to search)')\n raise OSError, 'Command \"which openssl\" returned: %s' % find_openssl\n\n log.info('Running command: %s' % openssl_command)\n openssl_status = os.system(openssl_command)\n if not openssl_status == 0:\n raise OSError, 'Attempt to run openssl returned: %s' % openssl_status\n\n # Extract the keys into strings.\n key = os.read(temp1[0], 2048)\n cert = os.read(temp2[0], 2048)\n\n os.close(temp1[0])\n os.close(temp2[0])\n\n os.unlink(temp1[1])\n os.unlink(temp2[1])\n\n new_cert = open(filename, 'wb')\n new_cert.write('%s\\n%s' % (key, cert))\n\n new_cert.close()\n\n log.info('Successfully created %s' % filename)\n return True", "def generate_certificates():\n print('GEN CERTS')\n domain = os.environ.get('SSL_DOMAIN_NAME', 'localhost')\n email = os.environ.get('SSL_EMAIL', '[email protected]')\n print(domain)\n cert_path = '/etc/letsencrypt/live/' + domain\n if domain == \"localhost\":\n print('GEN LOCALHOST SSL KEY')\n call(['mkdir', '-p', cert_path])\n cmd = [\n 'openssl',\n 'req',\n '-x509',\n '-newkey',\n 'rsa:4096',\n '-keyout',\n cert_path +\n '/privkey.pem',\n '-out',\n cert_path +\n '/cert.pem',\n '-days',\n '365',\n '-nodes',\n '-subj',\n '/CN=localhost']\n call(cmd)\n\n else:\n # files exist so renew\n if os.path.isfile(cert_path + '/cert.pem') and os.path.isfile(cert_path + \\\n '/fullchain.pem') and os.path.isfile(cert_path + '/privkey.pem'):\n print('RENEW CERTS')\n cmd = ['certbot', 'renew']\n print(cmd)\n call(cmd)\n\n else:\n print('GENERATE CERTS')\n cmd = [\n 'certbot',\n 'certonly',\n '-a',\n 'standalone',\n '--agree-tos',\n '-d',\n domain,\n '-m',\n email,\n ' --noninteractive']\n print(cmd)\n call(cmd)\n\n # use mosquitto conf template to rewrite mosquitto conf file including env\n # SSL_CERTIFICATES_FOLDER\n marker_replace_template(\n \"/etc/mosquitto/mosquitto-ssl-template.conf\",\n \"/etc/mosquitto/mosquitto-ssl.conf\",\n 'SSL_CERTIFICATE_FOLDER',\n cert_path)", "def initial_setup():\n\n if os.path.exists(cfg.ca_private_key_path()):\n pkey = _try_load_ca_private_key(cfg.ca_private_key_path())\n else:\n pkey = _generate_ca_private_key(cfg.ca_private_key_path())\n\n if os.path.exists(cfg.ca_cert_path()):\n _try_load_ca_cert(cfg.ca_cert_path())\n else:\n _generate_ca_cert(cfg.ca_cert_path(), pkey)", "def create_CA(dn):\n cmd_genrsa = [\"openssl\",\n \"genrsa\",\n \"-aes256\",\n \"-out\", f'{pki_dir}/ca.key',\n \"-passout\", f'pass:{ca_password}',\n f'{rsa_keysize}']\n cmd_req = [\"openssl\",\n \"req\",\n \"-new\",\n \"-x509\",\n \"-days\", \"999999\",\n \"-sha256\",\n \"-key\", f'{pki_dir}/ca.key',\n \"-out\", server_key_files[\"ca\"],\n \"-subj\", f'{dn}',\n \"-passin\", f'pass:{ca_password}']\n cmds = [cmd_genrsa, cmd_req]\n for cmd in cmds:\n exec_cmd(cmd)", "def Certificate(self) -> _n_8_t_0:", "def Certificate(self) -> _n_8_t_0:", "def generate(name, domain, country, state, locale, email,\n keytype, keylength):\n if not domain:\n logger.error(\n \"ctl:info:generate\", \"Choose a fully-qualified domain name of the \"\n \"certificate. 
Must match a domain present on the system\"\n )\n domain = click.prompt(\"Domain name\")\n if not country:\n logger.info(\n \"ctl:cert:generate\",\n \"Two-character country code (ex.: 'US' or 'CA')\"\n )\n country = click.prompt(\"Country code\")\n if not state:\n state = click.prompt(\"State/Province\")\n if not locale:\n locale = click.prompt(\"City/Town/Locale\")\n if not email:\n email = click.prompt(\"Contact email [optional]\")\n try:\n cmd = client().certificates.generate\n job, data = cmd(\n name, domain, country, state, locale, email, keytype, keylength)\n handle_job(job)\n except Exception as e:\n raise CLIException(str(e))", "def main():\n ssl_date_fmt = r'%b %d %H:%M:%S %Y %Z'\n #cert_file_name = os.path.join(os.path.dirname(__file__), \"testcert.pem\")\n\n parser = argparse.ArgumentParser(description='Parse a certificate and show days left')\n parser.add_argument('-v', '--verbose', action='store_true', help='show full certificate')\n parser.add_argument('cert', nargs='+', help='certifcate file(s)')\n args = parser.parse_args()\n for cert_file_name in args.cert:\n try:\n cert_dict = ssl._ssl._test_decode_cert(cert_file_name)\n serial = cert_dict['serialNumber']\n subject = dict(x[0] for x in cert_dict['subject'])\n issued_to = subject['commonName']\n time_left = datetime.datetime.strptime(cert_dict['notAfter'], ssl_date_fmt) - datetime.datetime.utcnow()\n if args.verbose:\n pp(cert_dict)\n ssl_expires_in(issued_to, serial, time_left)\n\n except Exception as error:\n print(\"Error decoding certificate: {:}\".format(error))", "def _generate_certificates(certfile_path: str, keyfile_path: str,\n common_name: str) -> None:\n ca_key = OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_PEM,\n _CA_KEY)\n ca_cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM,\n _CA_CERT)\n\n k = OpenSSL.crypto.PKey()\n k.generate_key(OpenSSL.crypto.TYPE_RSA, 4096)\n\n cert = OpenSSL.crypto.X509()\n cert.get_subject().C = 'US'\n cert.get_subject().CN = common_name\n cert.set_serial_number(random.randint(0, 2**64))\n cert.gmtime_adj_notBefore(0)\n cert.gmtime_adj_notAfter(10 * 365 * 24 * 60 * 60)\n cert.set_issuer(ca_cert.get_subject())\n cert.set_pubkey(k)\n cert.sign(ca_key, 'sha512')\n with open(certfile_path, \"w\") as f:\n f.write(\n OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM,\n cert).decode(\"utf-8\"))\n f.write(_CA_CERT)\n with open(keyfile_path, \"w\") as f:\n f.write(\n OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM,\n k).decode(\"utf-8\"))", "def opensslCmsCertCreate( ownerCertFile ):\n opensslCmdArgs = [ \"openssl\", \"crl2pkcs7\", \"-certfile\", ownerCertFile,\n \"-nocrl\", \"-outform\", \"der\" ]\n ownerCertCmsDerBase64 = runOpensslCmd( opensslCmdArgs, [ \"base64\" ] )\n return ownerCertCmsDerBase64", "def get_ssl_certificate() :", "def __init__(self, ca, cert_type):\n self.ca = ca # required\n self.nonce = None # optional, has default = os.urandom(32)\n self.public_key_comment = None\n self.serial = None # can be set, has default = 0\n self.cert_type = None # required: User = 1, Host = 2\n self.key_id = None # optional, default = ''\n self.valid_principals = list() # optional, default = ''\n self.valid_after = None # optional, default = 0\n self.valid_before = None # optional, default = 2^64-1\n self.critical_option_force_command = None # optional, default = ''\n self.critical_option_source_address = None # optional, default = ''\n self.extensions = None # optional, default = ''\n self.reserved = '' # should always be this value\n 
self.signature = None\n self.signed_cert = None\n self.public_key_comment = None\n self.cert_type = cert_type", "def __init__(__self__,\n resource_name: str,\n args: CertificateArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def request_cert():\n\n api_request = shallow_copy(props)\n\n for key in ['ServiceToken', 'Region', 'Tags', 'Route53RoleArn']:\n api_request.pop(key, None)\n\n if 'ValidationMethod' in props:\n if props['ValidationMethod'] == 'DNS':\n\n # Check that we have all the hosted zone information we need to validate\n # before we create the certificate\n for name in set([props['DomainName']] + props.get('SubjectAlternativeNames', [])):\n get_zone_for(name)\n\n del api_request['DomainValidationOptions']\n\n e['PhysicalResourceId'] = acm.request_certificate(\n IdempotencyToken=i_token,\n **api_request\n )['CertificateArn']\n add_tags()", "def create_server_certs_enc():\n global server_keystore, config\n\n same_enc_sign_cert = config[\"config\"][\"same_enc_sign_cert\"]\n if same_enc_sign_cert:\n dn = \"/CN=server certificate RSA\"\n else:\n dn = \"/CN=server certificate encryption RSA\"\n key_pair_rsa = create_csr(dn)\n server_keystore[\"key\"] = key_pair_rsa[\"key\"]\n san = [f'URI.1 = {uuid.uuid4().urn}']\n server_keystore[\"crt\"] = sign_csr(key_pair_rsa[\"pub\"], dn, san)", "def put_certificate(self, target, who, args, _files, _user_path):\n name = self.arg_get(args, 'name', str)\n if not commonl.verify_str_safe(name, do_raise = False):\n raise ValueError(\n f\"{name}: invalid certificate name, only [-_a-zA-Z0-9] allowed\")\n\n with target.target_owned_and_locked(who):\n target.timestamp()\n\n cert_path = os.path.join(target.state_dir, \"certificates\")\n cert_client_path = os.path.join(target.state_dir, \"certificates_client\")\n self._setup_maybe(target, cert_path, cert_client_path)\n\n client_key_path = os.path.join(cert_client_path, name + \".key\")\n client_req_path = os.path.join(cert_client_path, name + \".req\")\n client_cert_path = os.path.join(cert_client_path, name + \".cert\")\n\n if os.path.isfile(client_key_path) \\\n and os.path.isfile(client_cert_path):\t# already made?\n with open(client_key_path) as keyf, \\\n open(client_cert_path) as certf:\n return dict({\n \"name\": name,\n \"created\": False,\n \"key\": keyf.read(),\n \"cert\": certf.read(),\n })\n\n try:\n subprocess.run(\n f\"openssl genrsa -out {client_key_path} {self.key_size}\".split(),\n stdin = None, timeout = 5,\n capture_output = True, cwd = cert_path, check = True)\n allocid = target.fsdb.get(\"_alloc.id\", \"UNKNOWN\")\n subprocess.run(\n f\"openssl req -new -key {client_key_path} -out {client_req_path}\"\n f\" -subj /C=LC/ST=Local/L=Local/O=TCF-Signing-Authority-{target.id}-{allocid}/CN=TCF-{name}\".split(),\n check = True, cwd = cert_path,\n stdout = subprocess.PIPE, stderr = subprocess.STDOUT)\n target.log.debug(f\"{name}: created client's certificate\")\n\n # Issue the client certificate using the cert request and the CA cert/key.\n # note we run in the cert_path directory, so the ca.*\n # files are there\n subprocess.run(\n f\"openssl x509 -req -in {client_req_path} -CA ca.cert\"\n \" -CAkey ca.key -set_serial 101 -extensions client\"\n f\" -days 365 -outform PEM -out {client_cert_path}\".split(),\n stdin = None, timeout = 5,\n capture_output = True, cwd = cert_path, check = True)\n except subprocess.CalledProcessError as e:\n target.log.error(f\"command {' '.join(e.cmd)} failed: {e.output}\")\n self._client_wipe(name, cert_client_path)\t# don't leave things half there\n 
raise\n\n with open(client_key_path) as keyf, \\\n open(client_cert_path) as certf:\n return dict({\n \"name\": name,\n \"created\": True,\n \"key\": keyf.read(),\n \"cert\": certf.read(),\n })", "def generate_unsigned_certificate(cn,subjaltname=None,ca=True,ksize=1024):\n\n key = crypto.PKey()\n key.generate_key(crypto.TYPE_RSA, ksize)\n\n cert = crypto.X509()\n cert.set_version(2)\n\n cn = split_cn(cn)\n for k,v in cn.items():\n setattr(cert.get_subject(),k,v)\n\n cert.set_serial_number(get_serial())\n ten_years = 10*365*24*60*60\n cert.gmtime_adj_notBefore(-ten_years)\n cert.gmtime_adj_notAfter(ten_years)\n cert.set_pubkey(key)\n\n extensions = []\n\n if subjaltname:\n extensions.append(crypto.X509Extension(b\"subjectAltName\",False,b\", \".join(subjaltname)))\n else:\n extensions.append(crypto.X509Extension(b\"subjectAltName\",False,b\"DNS:%s\" % (cn[\"CN\"].encode(\"utf-8\"),)))\n\n if ca:\n extensions.append(crypto.X509Extension(b'basicConstraints', True, b'CA:TRUE'))\n else:\n extensions.append(crypto.X509Extension(b'basicConstraints', True, b'CA:FALSE'))\n\n extensions.append(crypto.X509Extension(b'extendedKeyUsage', True, b'serverAuth,emailProtection,timeStamping'))\n extensions.append(crypto.X509Extension(b'keyUsage', False, b\"digitalSignature, keyEncipherment\"))\n extensions.append(crypto.X509Extension(b'subjectKeyIdentifier', False, b'hash', subject=cert))\n cert.add_extensions(extensions)\n\n return cert,key", "def create_server_certs_sign():\n global server_keystore\n\n dn_sign = \"/CN=server certificate sign RSA-PSS\"\n key_pair_rsa_sign = create_csr_pss(dn_sign)\n server_keystore[\"key-sign\"] = key_pair_rsa_sign[\"key\"]\n san = [f'URI.1 = {uuid.uuid4().urn}']\n server_keystore[\"crt-sign\"] = sign_csr(key_pair_rsa_sign[\"pub\"], dn_sign, san)", "def __init__(\n self,\n key: Optional[OpenSSL.crypto.PKey] = None,\n cert: Optional[OpenSSL.crypto.X509] = None,\n ):\n self.key = key if key else new_RSA()\n self.cert = cert if cert else new_X509()\n\n # Creates CA.\n self.cert.set_pubkey(self.key)\n self.cert.add_extensions(\n [\n OpenSSL.crypto.X509Extension(b\"basicConstraints\", True, b\"CA:TRUE, pathlen:0\"),\n OpenSSL.crypto.X509Extension(b\"keyUsage\", True, b\"keyCertSign, cRLSign\"),\n OpenSSL.crypto.X509Extension(b\"subjectKeyIdentifier\", False, b\"hash\", subject=self.cert),\n ],\n )\n self.cert.sign(self.key, \"sha256\")", "def create_self_signed_cert():\n\n # create a key pair\n k = crypto.PKey()\n k.generate_key(crypto.TYPE_RSA, 1024)\n\n # create a self-signed cert\n cert = crypto.X509()\n cert.get_subject().C = \"GP\"\n cert.get_subject().ST = \"GRAD_PROJECT\"\n cert.get_subject().L = \"GRAD_PROJECT\"\n cert.get_subject().OU = \"GRAD_PROJECT\"\n cert.get_subject().CN = gethostname()\n cert.set_serial_number(1000)\n cert.gmtime_adj_notBefore(0)\n cert.gmtime_adj_notAfter(10*365*24*60*60)\n cert.set_issuer(cert.get_subject())\n cert.set_pubkey(k)\n cert.sign(k, 'sha1')\n\n cert_file = open(CERT_FILE, \"wb\")\n cert_file.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))\n cert_file.close()\n\n key_file = open(KEY_FILE, \"wb\")\n key_file.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, k))\n key_file.close()", "def create_certificate(self, subscription_id, management_host, hackathon_name):\n\n # make sure certificate dir exists\n if not os.path.isdir(self.CERT_BASE):\n self.log.debug('certificate dir not exists')\n os.mkdir(self.CERT_BASE)\n\n base_url = '%s/%s' % (self.CERT_BASE, subscription_id)\n\n pem_url = base_url + '.pem'\n # avoid duplicate pem 
generation\n if not os.path.isfile(pem_url):\n pem_command = 'openssl req -x509 -nodes -days 365 -newkey rsa:1024 -keyout %s -out %s -batch' % \\\n (pem_url, pem_url)\n commands.getstatusoutput(pem_command)\n else:\n self.log.debug('%s exists' % pem_url)\n\n cert_url = base_url + '.cer'\n # avoid duplicate cert generation\n if not os.path.isfile(cert_url):\n cert_command = 'openssl x509 -inform pem -in %s -outform der -out %s' % (pem_url, cert_url)\n commands.getstatusoutput(cert_command)\n else:\n self.log.debug('%s exists' % cert_url)\n\n azure_key = self.db.find_first_object_by(AzureKey,\n cert_url=cert_url,\n pem_url=pem_url,\n subscription_id=subscription_id,\n management_host=management_host)\n # avoid duplicate azure key\n if azure_key is None:\n azure_key = self.db.add_object_kwargs(AzureKey,\n cert_url=cert_url,\n pem_url=pem_url,\n subscription_id=subscription_id,\n management_host=management_host)\n self.db.commit()\n else:\n self.log.debug('azure key exists')\n\n hackathon_id = self.db.find_first_object_by(Hackathon, name=hackathon_name).id\n hackathon_azure_key = self.db.find_first_object_by(HackathonAzureKey,\n hackathon_id=hackathon_id,\n azure_key_id=azure_key.id)\n # avoid duplicate hackathon azure key\n if hackathon_azure_key is None:\n self.db.add_object_kwargs(HackathonAzureKey,\n hackathon_id=hackathon_id,\n azure_key_id=azure_key.id)\n self.db.commit()\n else:\n self.log.debug('hackathon azure key exists')\n\n azure_cert_url = self.file_service.upload_file_to_azure_from_path(cert_url, self.CONTAINER_NAME,\n subscription_id + '.cer')\n azure_key.cert_url = azure_cert_url\n self.db.commit()\n return azure_cert_url", "def __init__(__self__,\n resource_name: str,\n args: ServerCertificateArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def get_ssl_certificate():", "def gen_ca():\n require_root()\n\n config.proxy.gen_ca_certs()\n log.info('OK')", "def __init__(self, proxy_only = False):\n self.key_file = None\n self.cert_file = None\n self.ca_path = None\n self.key_pass = None\n\n path = os.getenv(\"X509_CERT_DIR\", None)\n if path and os.path.exists(path):\n self.ca_path = path\n\n if not self.ca_path:\n path = \"/etc/grid-security/certificates\"\n if os.path.exists(path):\n self.ca_path = path\n\n path = os.getenv(\"X509_USER_PROXY\", None)\n if path and os.path.exists(path):\n self.key_file = self.cert_file = path\n\n if not self.key_file:\n path = os.getenv(\"X509_USER_KEY\", None)\n if path and os.path.exists(path):\n self.key_file = path\n\n if not self.cert_file:\n path = os.getenv(\"X509_USER_CERT\", None)\n if path and os.path.exists(path):\n self.cert_file = path\n\n if not self.key_file:\n path = os.getenv(\"HOME\") + \"/.globus/userkey.pem\"\n if os.path.exists(path):\n self.key_file = path\n\n if not self.cert_file:\n path = os.getenv(\"HOME\") + \"/.globus/usercert.pem\"\n if os.path.exists(path):\n self.cert_file = path\n\n if not self.ca_path or not os.path.exists(self.ca_path):\n raise RuntimeError(\"no certificate directory found\")\n\n if not self.key_file or not os.path.exists(self.key_file):\n raise RuntimeError(\"no certificate private key file found\")\n\n if not self.cert_file or not os.path.exists(self.cert_file):\n raise RuntimeError(\"no certificate public key file found\")\n\n if not proxy_only and self.key_file != self.cert_file:\n self.key_pass = getpass(\"Password for %s: \" % self.key_file)", "def __init__(__self__,\n resource_name: str,\n args: SSLCertificateArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", 
"def certificate_data(prog):\n retval = Prog.RetVal.ok\n prog.log.info3(\"+++ generating certificate data (hashes)...\")\n for target in prog.target_list:\n uniq = []\n for t in target.tlsa:\n if t.params() in uniq:\n continue\n uniq += [ t.params() ]\n\n prog.log.info3(\n \" ++ tlsa: {}{}{}, request: {}\".format(t.usage, t.selector,\n t.matching,\n target.domain))\n try:\n data = get_data(prog, target.domain, t)\n for d in data:\n prog.log.info3(\n \" + cert: {}\\n + data: {}\".format(d[0], d[1]))\n\n # The only time we _don't_ print this, is if we are\n # printing the log info to stdout and the debug level\n # is 'debug':\n if not (prog.log.type == logging.LogType.stdout\n and prog.log.level == logging.LogLevel.debug):\n print(\"{} {} {} {} {} {}\".format(\n get_domain(prog, d[0]),\n t.usage, t.selector, t.matching, d[1], d[0]))\n\n except (Except.FunctionError, Except.InternalError,\n Except.DNSProcessingError) as ex:\n prog.log.error(\"{}: {}\".format(target.domain, ex.message))\n retval = Prog.RetVal.exit_failure\n continue\n\n return retval", "def generate_root_CA():\n\n ##generating root key\n\n root_private_key = rsa.generate_private_key(\n public_exponent=65537,\n key_size=2048,\n backend=default_backend())\n\n\n ##self-sign and generate the root certificate\n\n root_public_key = root_private_key.public_key()\n builder = x509.CertificateBuilder()\n builder = builder.subject_name(x509.Name([\n x509.NameAttribute(NameOID.COMMON_NAME, u'Northeastern SSL Test CA'),\n x509.NameAttribute(NameOID.ORGANIZATION_NAME, u'Northeastern'),\n x509.NameAttribute(NameOID.ORGANIZATIONAL_UNIT_NAME, u'SSL Clock Skews'),\n ]))\n\n builder = builder.issuer_name(x509.Name([\n x509.NameAttribute(NameOID.COMMON_NAME, u'Northeastern SSL Test CA'),\n ]))\n builder = builder.not_valid_before(datetime.datetime.today() - datetime.timedelta(days=1))\n builder = builder.not_valid_after(datetime.datetime(2019, 12, 31))\n builder = builder.serial_number(int(uuid.uuid4()))\n builder = builder.public_key(root_public_key)\n builder = builder.add_extension(\n x509.BasicConstraints(ca=True, path_length=None), critical=True,)\n\n root_certificate = builder.sign(\n private_key=root_private_key, algorithm=hashes.SHA256(),\n backend=default_backend()\n )\n\n\n ##write to disk\n \n\n\n with open(\"rootCA.key\", \"wb\") as f:\n f.write(root_private_key.private_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PrivateFormat.TraditionalOpenSSL,\n encryption_algorithm=serialization.NoEncryption()\n ))\n\n with open(\"rootCA.crt\", \"wb\") as f:\n f.write(root_certificate.public_bytes(\n encoding=serialization.Encoding.PEM,\n ))\n\n return root_private_key, root_certificate", "def setUp(self):\n self._tmp_dir = tempfile.mkdtemp(prefix='ssm')\n\n # Some functions require the hardcoded expired certificate and\n # key to be files.\n key_fd, self._key_path = tempfile.mkstemp(prefix='key',\n dir=self._tmp_dir)\n os.close(key_fd)\n with open(self._key_path, 'w') as key:\n key.write(TEST_KEY)\n\n cert_fd, self._expired_cert_path = tempfile.mkstemp(prefix='cert',\n dir=self._tmp_dir)\n os.close(cert_fd)\n with open(self._expired_cert_path, 'w') as cert:\n cert.write(EXPIRED_CERT)\n\n valid_dn_file, self.valid_dn_path = tempfile.mkstemp(\n prefix='valid', dir=self._tmp_dir)\n os.close(valid_dn_file)\n with open(self.valid_dn_path, 'w') as dn:\n dn.write('/test/dn')\n\n # Create a new certificate using the hardcoded key.\n # The subject has been hardcoded so that the generated\n # certificate subject matches the subject 
of the hardcoded,\n # expired, certificate at the bottom of this file.\n # 2 days used so that verify_cert_date doesn't think it expires soon.\n call(['openssl', 'req', '-x509', '-nodes', '-days', '2', '-new',\n '-key', self._key_path, '-out', TEST_CERT_FILE,\n '-subj', '/C=UK/O=STFC/OU=SC/CN=Test Cert'])\n\n self._brokers = [('not.a.broker', 123)]\n self._capath = '/not/a/path'\n self._check_crls = False\n self._pidfile = self._tmp_dir + '/pidfile'\n\n self._listen = '/topic/test'\n self._dest = '/topic/test'\n\n self._msgdir = tempfile.mkdtemp(prefix='msgq')", "def _generate_ca_cert(path, pkey):\n crt = _make_base_cert(pkey, 5000, socket.gethostname(),\n random.randrange(0, 2**64))\n crt.set_issuer(crt.get_subject())\n crt.sign(pkey, 'sha256')\n\n data = crypto.dump_certificate(crypto.FILETYPE_PEM, crt)\n open(path, 'wb').write(data)", "def init_pki():\n global server_keystore\n\n if pki_is_persistent:\n if not Path(pki_dir).is_dir():\n create_pki()\n else:\n print(f'Do nothing, {pki_dir} already exists')\n else:\n if Path(pki_dir).is_dir():\n shutil.rmtree(pki_dir)\n create_pki()\n with open(server_key_files[\"ca\"]) as crt:\n server_keystore[\"ca\"] = crt.read()\n crt.close()", "def __init__(self, cert_string=None, cert_file=None, key_string=None, key_file=None, passphrase=None):\n self._context = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_METHOD)\n\n if cert_file:\n # we have to load certificate for equality check. there is no\n # other way to obtain certificate from context.\n with open(cert_file, 'rb') as fp:\n cert_string = fp.read()\n\n cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, cert_string)\n self._context.use_certificate(cert)\n\n if not key_string and not key_file:\n # OpenSSL is smart enought to locate private key in certificate\n args = [OpenSSL.crypto.FILETYPE_PEM, cert_string]\n if passphrase is not None:\n args.append(passphrase)\n\n pk = OpenSSL.crypto.load_privatekey(*args)\n self._context.use_privatekey(pk)\n elif key_file and not passphrase:\n self._context.use_privatekey_file(key_file, OpenSSL.crypto.FILETYPE_PEM)\n\n else:\n if key_file:\n # key file is provided with passphrase. 
context.use_privatekey_file\n # does not use passphrase, so we have to load the key file manually.\n with open(key_file, 'rb') as fp:\n key_string = fp.read()\n\n args = [OpenSSL.crypto.FILETYPE_PEM, key_string]\n if passphrase is not None:\n args.append(passphrase)\n\n pk = OpenSSL.crypto.load_privatekey(*args)\n self._context.use_privatekey(pk)\n\n # check if we are not passed some garbage\n self._context.check_privatekey()\n\n # used to compare certificates.\n self._equality = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, cert)", "def generate_test_cert(**kwargs):\n sans = kwargs['sans'].split(',')\n if not os.path.exists(TEST_CA_CERT_PATH):\n print('CA cert not found, generating CA certs.')\n run(['mkdir', '-p', TEST_CA_ROOT_PATH])\n generate_ca_cert(TEST_CA_CERT_PATH, TEST_CA_KEY_PATH)\n\n cn = sans[0]\n\n cert_path = os.path.join(TEST_CA_ROOT_PATH, '{cn}.crt'.format(cn=cn))\n key_path = os.path.join(TEST_CA_ROOT_PATH, '{cn}.key'.format(cn=cn))\n try:\n _generate_ssl_certificate(\n sans,\n cn,\n cert_path,\n key_path,\n TEST_CA_CERT_PATH,\n TEST_CA_KEY_PATH,\n )\n except Exception as err:\n sys.stderr.write(\n 'Certificate creation failed: {err_type}- {msg}\\n'.format(\n err_type=type(err),\n msg=str(err),\n )\n )\n raise\n\n print(\n 'Created cert and key:\\n'\n ' {cert}\\n'\n ' {key}\\n'\n '\\n'\n 'CA cert: {ca_cert}'.format(\n cert=cert_path,\n key=key_path,\n ca_cert=TEST_CA_CERT_PATH,\n )\n )", "def deploy_cert(self, domain, cert_path, key_path, chain_path, fullchain_path):\n if self.config.rsa_key_size > 2048:\n print(\n \"The maximum public key size allowed for Cloudfront is 2048 (\"\n \"https://docs.aws.amazon.com/AmazonCloudFront/latest\"\n \"/DeveloperGuide/cnames-and-https-requirements.html)\\n\"\n \"Please, use --rsa_key_size 2048 or edit your cli.ini\")\n sys.exit(1)\n client = boto3.client('iam')\n cf_client = boto3.client('cloudfront')\n\n name = \"le-%s\" % domain\n body = open(cert_path).read()\n key = open(key_path).read()\n chain = open(chain_path).read()\n\n suffix = \"-%i\" % int(os.path.getmtime(cert_path))\n\n # Check if certificate already exists\n certificates = client.list_server_certificates(\n PathPrefix=\"/cloudfront/letsencrypt/\"\n )\n cert_id = None\n for cert in certificates['ServerCertificateMetadataList']:\n if cert['ServerCertificateName'] == (name + suffix):\n cert_id = cert['ServerCertificateId']\n\n # If certificate doesn't already exists, upload cert to IAM\n if not cert_id:\n response = client.upload_server_certificate(\n Path=\"/cloudfront/letsencrypt/\",\n ServerCertificateName=name + suffix,\n CertificateBody=body,\n PrivateKey=key,\n CertificateChain=chain\n )\n cert_id = response['ServerCertificateMetadata']['ServerCertificateId']\n\n # Update CloudFront config to use the new one\n cf_cfg = cf_client.get_distribution_config(Id=self.conf('cf-distribution-id'))\n cf_cfg['DistributionConfig']['ViewerCertificate']['IAMCertificateId'] = cert_id\n cf_cfg['DistributionConfig']['ViewerCertificate']['Certificate'] = cert_id\n cf_cfg['DistributionConfig']['ViewerCertificate']['CertificateSource'] = 'iam'\n\n # Set the default mode to SNI-only to avoid surprise charges\n if 'SSLSupportMethod' not in cf_cfg['DistributionConfig']['ViewerCertificate']:\n cf_cfg['DistributionConfig']['ViewerCertificate']['SSLSupportMethod'] = 'sni-only'\n cf_cfg['DistributionConfig']['ViewerCertificate']['MinimumProtocolVersion'] = 'TLSv1'\n\n try:\n cf_cfg['DistributionConfig']['ViewerCertificate'].pop('CloudFrontDefaultCertificate')\n except KeyError:\n 
pass\n try:\n cf_cfg['DistributionConfig']['ViewerCertificate'].pop('ACMCertificateArn')\n except KeyError:\n pass\n response = cf_client.update_distribution(DistributionConfig=cf_cfg['DistributionConfig'],\n Id=self.conf('cf-distribution-id'),\n IfMatch=cf_cfg['ETag'])\n\n # Delete old certs\n certificates = client.list_server_certificates(\n PathPrefix=\"/cloudfront/letsencrypt/\"\n )\n for cert in certificates['ServerCertificateMetadataList']:\n if (cert['ServerCertificateName'].startswith(name) and\n cert['ServerCertificateName'] != name + suffix):\n try:\n client.delete_server_certificate(\n ServerCertificateName=cert['ServerCertificateName']\n )\n except botocore.exceptions.ClientError as e:\n logger.error(e)", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n certificate: Optional[pulumi.Input[str]] = None,\n certificate_id: Optional[pulumi.Input[str]] = None,\n certificate_name: Optional[pulumi.Input[str]] = None,\n domain: Optional[pulumi.Input[str]] = None,\n instance_id: Optional[pulumi.Input[str]] = None,\n private_key: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def generate_key_and_cert():\n signing_key = rsa.generate_private_key(backend=crypto_default_backend(), public_exponent=65537, key_size=2048)\n subject = issuer = x509.Name(\n [\n x509.NameAttribute(NameOID.COUNTRY_NAME, 'NO'),\n x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, 'OSLO'),\n x509.NameAttribute(NameOID.LOCALITY_NAME, 'OSLO'),\n x509.NameAttribute(NameOID.ORGANIZATION_NAME, 'Intility AS'),\n x509.NameAttribute(NameOID.COMMON_NAME, 'intility.no'),\n ]\n )\n signing_cert = (\n x509.CertificateBuilder()\n .subject_name(subject)\n .issuer_name(issuer)\n .public_key(signing_key.public_key())\n .serial_number(x509.random_serial_number())\n .not_valid_before(datetime.utcnow())\n .not_valid_after(\n # Our certificate will be valid for 10 days\n datetime.utcnow()\n + timedelta(days=10)\n # Sign our certificate with our private key\n )\n .sign(signing_key, hashes.SHA256(), crypto_default_backend())\n .public_bytes(crypto_serialization.Encoding.DER)\n )\n return signing_key, signing_cert", "def test_warn_self_signed(self):\n config_dir = self.mktemp()\n os.mkdir(config_dir)\n with open(os.path.join(config_dir, \"cert.pem\"), \"w\") as f:\n f.write(\n \"\"\"-----BEGIN 
CERTIFICATE-----\nMIID6DCCAtACAws9CjANBgkqhkiG9w0BAQUFADCBtzELMAkGA1UEBhMCVFIxDzAN\nBgNVBAgMBsOHb3J1bTEUMBIGA1UEBwwLQmHFn21ha8OnxLExEjAQBgNVBAMMCWxv\nY2FsaG9zdDEcMBoGA1UECgwTVHdpc3RlZCBNYXRyaXggTGFiczEkMCIGA1UECwwb\nQXV0b21hdGVkIFRlc3RpbmcgQXV0aG9yaXR5MSkwJwYJKoZIhvcNAQkBFhpzZWN1\ncml0eUB0d2lzdGVkbWF0cml4LmNvbTAgFw0xNzA3MTIxNDAxNTNaGA8yMTE3MDYx\nODE0MDE1M1owgbcxCzAJBgNVBAYTAlRSMQ8wDQYDVQQIDAbDh29ydW0xFDASBgNV\nBAcMC0JhxZ9tYWvDp8SxMRIwEAYDVQQDDAlsb2NhbGhvc3QxHDAaBgNVBAoME1R3\naXN0ZWQgTWF0cml4IExhYnMxJDAiBgNVBAsMG0F1dG9tYXRlZCBUZXN0aW5nIEF1\ndGhvcml0eTEpMCcGCSqGSIb3DQEJARYac2VjdXJpdHlAdHdpc3RlZG1hdHJpeC5j\nb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDwT6kbqtMUI0sMkx4h\nI+L780dA59KfksZCqJGmOsMD6hte9EguasfkZzvCF3dk3NhwCjFSOvKx6rCwiteo\nWtYkVfo+rSuVNmt7bEsOUDtuTcaxTzIFB+yHOYwAaoz3zQkyVW0c4pzioiLCGCmf\nFLdiDBQGGp74tb+7a0V6kC3vMLFoM3L6QWq5uYRB5+xLzlPJ734ltyvfZHL3Us6p\ncUbK+3WTWvb4ER0W2RqArAj6Bc/ERQKIAPFEiZi9bIYTwvBH27OKHRz+KoY/G8zY\n+l+WZoJqDhupRAQAuh7O7V/y6bSP+KNxJRie9QkZvw1PSaGSXtGJI3WWdO12/Ulg\nepJpAgMBAAEwDQYJKoZIhvcNAQEFBQADggEBAJXEq5P9xwvP9aDkXIqzcD0L8sf8\newlhlxTQdeqt2Nace0Yk18lIo2oj1t86Y8jNbpAnZJeI813Rr5M7FbHCXoRc/SZG\nI8OtG1xGwcok53lyDuuUUDexnK4O5BkjKiVlNPg4HPim5Kuj2hRNFfNt/F2BVIlj\niZupikC5MT1LQaRwidkSNxCku1TfAyueiBwhLnFwTmIGNnhuDCutEVAD9kFmcJN2\nSznugAcPk4doX2+rL+ila+ThqgPzIkwTUHtnmjI0TI6xsDUlXz5S3UyudrE2Qsfz\ns4niecZKPBizL6aucT59CsunNmmb5Glq8rlAcU+1ZTZZzGYqVYhF6axB9Qg=\n-----END CERTIFICATE-----\"\"\"\n )\n\n config = {\n \"tls_certificate_path\": os.path.join(config_dir, \"cert.pem\"),\n \"tls_fingerprints\": [],\n }\n\n t = TestConfig()\n t.read_config(config, config_dir_path=\"\", data_dir_path=\"\")\n t.read_certificate_from_disk(require_cert_and_key=False)\n\n warnings = self.flushWarnings()\n self.assertEqual(len(warnings), 1)\n self.assertEqual(\n warnings[0][\"message\"],\n (\n \"Self-signed TLS certificates will not be accepted by \"\n \"Synapse 1.0. 
Please either provide a valid certificate, \"\n \"or use Synapse's ACME support to provision one.\"\n ),\n )", "def __init__(__self__, *,\n certificate: Optional[pulumi.Input[str]] = None,\n certificate_id: Optional[pulumi.Input[str]] = None,\n certificate_name: Optional[pulumi.Input[str]] = None,\n domain: Optional[pulumi.Input[str]] = None,\n instance_id: Optional[pulumi.Input[str]] = None,\n private_key: Optional[pulumi.Input[str]] = None):\n if certificate is not None:\n pulumi.set(__self__, \"certificate\", certificate)\n if certificate_id is not None:\n pulumi.set(__self__, \"certificate_id\", certificate_id)\n if certificate_name is not None:\n pulumi.set(__self__, \"certificate_name\", certificate_name)\n if domain is not None:\n pulumi.set(__self__, \"domain\", domain)\n if instance_id is not None:\n pulumi.set(__self__, \"instance_id\", instance_id)\n if private_key is not None:\n pulumi.set(__self__, \"private_key\", private_key)", "def _Run(args, holder, ssl_certificate_ref):\n client = holder.client\n\n certificate_type, self_managed, managed = _ParseCertificateArguments(\n client, args)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n request = client.messages.ComputeRegionSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n type=certificate_type,\n name=ssl_certificate_ref.Name(),\n selfManaged=self_managed,\n managed=managed,\n description=args.description),\n region=ssl_certificate_ref.region,\n project=ssl_certificate_ref.project)\n else:\n request = client.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n type=certificate_type,\n name=ssl_certificate_ref.Name(),\n selfManaged=self_managed,\n managed=managed,\n description=args.description),\n project=ssl_certificate_ref.project)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n collection = client.apitools_client.regionSslCertificates\n else:\n collection = client.apitools_client.sslCertificates\n\n return client.MakeRequests([(collection, 'Insert', request)])", "def _new_runtime_credentials(self, force=False, readonly=False):\n _log.debug(\"new_runtime_credentials\")\n #Create keys and certificate request\n private_key = os.path.join(self.runtime_dir, \"private\", \"private.key\")\n private = os.path.dirname(private_key)\n _log.debug(\"new_runtime: %s\" % self.runtime_dir)\n out = os.path.join(self.runtime_dir, \"{}.csr\".format(self.node_name))\n _log.debug(\"out dir: %s\"% out)\n # Create ECC-based certificate\n log = subprocess.Popen([\"openssl\", \"ecparam\", \"-genkey\",\n \"-name\", \"prime256v1\",\n \"-out\", private_key],\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n stdout, stderr = log.communicate()\n if log.returncode != 0:\n raise IOError(stderr)\n\n log = subprocess.Popen([\"openssl\", \"req\", \"-new\",\n \"-config\",self.configfile,\n # \"-subj\", subject,\n \"-key\", private_key,\n \"-nodes\",\n \"-utf8\",\n \"-out\", out],\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n stdout, stderr = log.communicate()\n if log.returncode != 0:\n raise IOError(stderr)\n\n return out", "def _bpki_certify(cls, keypair, issuer_name, subject_name, subject_key,\n serial, now, notAfter, pathLenConstraint, is_ca):\n\n if now is None:\n now = rpki.sundial.now()\n\n issuer_key = keypair.get_public()\n\n assert (issuer_key == subject_key) == (issuer_name == subject_name)\n assert is_ca or issuer_name != subject_name\n assert is_ca or pathLenConstraint is None\n assert 
pathLenConstraint is None or (isinstance(pathLenConstraint, (int, long)) and\n pathLenConstraint >= 0)\n\n cert = rpki.POW.X509()\n cert.setVersion(2)\n cert.setSerial(serial)\n cert.setIssuer(issuer_name.get_POW())\n cert.setSubject(subject_name.get_POW())\n cert.setNotBefore(now)\n cert.setNotAfter(notAfter)\n cert.setPublicKey(subject_key.get_POW())\n cert.setSKI(subject_key.get_POW().calculateSKI())\n if issuer_key != subject_key:\n cert.setAKI(issuer_key.get_POW().calculateSKI())\n if is_ca:\n cert.setBasicConstraints(True, pathLenConstraint)\n cert.sign(keypair.get_POW(), rpki.POW.SHA256_DIGEST)\n return cls(POW = cert)", "def __init__(__self__, *,\n cert_chain_object_name: Optional[pulumi.Input[str]] = None,\n cert_object_name: Optional[pulumi.Input[str]] = None,\n key_object_name: Optional[pulumi.Input[str]] = None,\n key_vault_id: Optional[pulumi.Input[str]] = None,\n root_cert_object_name: Optional[pulumi.Input[str]] = None):\n if cert_chain_object_name is not None:\n pulumi.set(__self__, \"cert_chain_object_name\", cert_chain_object_name)\n if cert_object_name is not None:\n pulumi.set(__self__, \"cert_object_name\", cert_object_name)\n if key_object_name is not None:\n pulumi.set(__self__, \"key_object_name\", key_object_name)\n if key_vault_id is not None:\n pulumi.set(__self__, \"key_vault_id\", key_vault_id)\n if root_cert_object_name is not None:\n pulumi.set(__self__, \"root_cert_object_name\", root_cert_object_name)", "def build_certifications(data_dir, output_dir):\n return yamls_to_certification.create_yaml_certifications(\n data_dir=data_dir, output_dir=output_dir\n )", "def add_cert():\n\n if not g.user:\n flash(\"Please login to access\", \"danger\")\n return redirect(\"/\")\n \n if g.user.is_admin == False:\n flash (\"Unauthorized\", \"danger\")\n return redirect(\"/login\")\n\n form = Cert_Form()\n\n if form.validate_on_submit():\n cert = Cert(\n cert_name = form.cert_name.data,\n hours = form.hours.data,\n is_required = form.is_required.data,\n expire = form.expire.data,\n good_for_time = form.good_for_time.data,\n good_for_unit = form.good_for_unit.data,\n \n )\n db.session.add(cert)\n db.session.commit()\n\n flash(\"Certification Added!\", \"success\")\n return redirect(\"/administrator\")\n\n else: \n\n return render_template(\"/admin/add_cert.html\", form = form)", "def _new_opensslconf(self):\n# print \"new_opensslconf\"\n _log.debug(\"__init__::new_opensslconf\")\n for section in self.__class__.DEFAULT.keys():\n self.config.add_section(section)\n# print \"[{}]\".format(section)\n hostname = socket.gethostname()\n for option in self.__class__.DEFAULT[section]:\n if option == \"0.organizationName\":\n value = self.domain\n #TODO: use dynamic number of DNS entries instead of hardcoding the number\n elif option == \"DNS.1\":\n value = self.node_name\n elif (option == \"DNS.2\") and len(self.hostnames)>0:\n value = self.hostnames[0]\n elif (option == \"DNS.3\") and len(self.hostnames)>1:\n value = self.hostnames[1]\n elif (option == \"DNS.4\") and len(self.hostnames)>2:\n value = self.hostnames[2]\n elif option == \"IP.1\":\n value = self.ip\n elif option == \"dir\":\n value = self.runtime_dir\n elif section == 'req_distinguished_name' and option == 'commonName':\n value = self.node_name\n elif option == 'dnQualifier':\n value = self.node_id\n #The python cryptography and the pyOpensSSL packages does not support\n #parsing the Attributes extension in a CSR, so instead it is stored\n #outside of the CSR\n# elif option == 'challengePassword':\n# value = 
self.enrollment_password\n else:\n value = self.__class__.DEFAULT[section][option]\n# print \"\\t{}={}\".format(option, value)\n self.config.set(section, option, value)\n with open(self.configfile, 'wb') as configfd:\n self.config.write(configfd)\n configfd.close()\n confsort.reorder(self.configfile)", "def __init__(__self__, *,\n certificate: Optional[pulumi.Input[str]] = None,\n certificate_id: Optional[pulumi.Input[int]] = None,\n creation_timestamp: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n expire_time: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n name_prefix: Optional[pulumi.Input[str]] = None,\n private_key: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[str]] = None,\n self_link: Optional[pulumi.Input[str]] = None):\n if certificate is not None:\n pulumi.set(__self__, \"certificate\", certificate)\n if certificate_id is not None:\n pulumi.set(__self__, \"certificate_id\", certificate_id)\n if creation_timestamp is not None:\n pulumi.set(__self__, \"creation_timestamp\", creation_timestamp)\n if description is not None:\n pulumi.set(__self__, \"description\", description)\n if expire_time is not None:\n pulumi.set(__self__, \"expire_time\", expire_time)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if name_prefix is not None:\n pulumi.set(__self__, \"name_prefix\", name_prefix)\n if private_key is not None:\n pulumi.set(__self__, \"private_key\", private_key)\n if project is not None:\n pulumi.set(__self__, \"project\", project)\n if self_link is not None:\n pulumi.set(__self__, \"self_link\", self_link)", "def create_https_certificates(ssl_cert, ssl_key):\n\n import logger\n from OpenSSL import crypto\n from certgen import createKeyPair, createCertRequest, createCertificate, \\\n TYPE_RSA, serial\n\n # Create the CA Certificate\n cakey = createKeyPair(TYPE_RSA, 2048)\n careq = createCertRequest(cakey, CN=\"Certificate Authority\")\n cacert = createCertificate(careq, (careq, cakey), serial, (0, 60 * 60 * 24 * 365 * 10)) # ten years\n\n pkey = createKeyPair(TYPE_RSA, 2048)\n req = createCertRequest(pkey, CN=\"Mylar\")\n cert = createCertificate(req, (cacert, cakey), serial, (0, 60 * 60 * 24 * 365 * 10)) # ten years\n\n # Save the key and certificate to disk\n try:\n with open(ssl_key, \"w\") as fp:\n fp.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))\n with open(ssl_cert, \"w\") as fp:\n fp.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))\n except IOError as e:\n logger.error(\"Error creating SSL key and certificate: %s\", e)\n return False\n\n return True", "def __init__(__self__, *,\n cert_challenge_discovered_txt: Sequence[str],\n cert_challenge_dns: 'outputs.CertDnsChallengeResponse',\n cert_challenge_http: 'outputs.CertHttpChallengeResponse',\n cert_status: str,\n discovered_ips: Sequence[str],\n dns_fetch_time: str,\n dns_status: str,\n expected_ips: Sequence[str]):\n pulumi.set(__self__, \"cert_challenge_discovered_txt\", cert_challenge_discovered_txt)\n pulumi.set(__self__, \"cert_challenge_dns\", cert_challenge_dns)\n pulumi.set(__self__, \"cert_challenge_http\", cert_challenge_http)\n pulumi.set(__self__, \"cert_status\", cert_status)\n pulumi.set(__self__, \"discovered_ips\", discovered_ips)\n pulumi.set(__self__, \"dns_fetch_time\", dns_fetch_time)\n pulumi.set(__self__, \"dns_status\", dns_status)\n pulumi.set(__self__, \"expected_ips\", expected_ips)", "def add_cert(self, student, course_id, course=None, forced_grade=None, 
template_file=None, title='None'):\r\n\r\n VALID_STATUSES = [status.generating,\r\n status.unavailable,\r\n status.deleted,\r\n status.error,\r\n status.notpassing]\r\n\r\n cert_status = certificate_status_for_student(student, course_id)['status']\r\n\r\n new_status = cert_status\r\n\r\n if cert_status in VALID_STATUSES:\r\n # grade the student\r\n\r\n # re-use the course passed in optionally so we don't have to re-fetch everything\r\n # for every student\r\n if course is None:\r\n course = courses.get_course_by_id(course_id)\r\n profile = UserProfile.objects.get(user=student)\r\n profile_name = profile.name\r\n\r\n # Needed\r\n self.request.user = student\r\n self.request.session = {}\r\n\r\n course_name = course.display_name or course_id.to_deprecated_string()\r\n is_whitelisted = self.whitelist.filter(user=student, course_id=course_id, whitelist=True).exists()\r\n grade = grades.grade(student, self.request, course)\r\n enrollment_mode = CourseEnrollment.enrollment_mode_for_user(student, course_id)\r\n mode_is_verified = (enrollment_mode == GeneratedCertificate.MODES.verified)\r\n user_is_verified = SoftwareSecurePhotoVerification.user_is_verified(student)\r\n user_is_reverified = SoftwareSecurePhotoVerification.user_is_reverified_for_all(course_id, student)\r\n cert_mode = enrollment_mode\r\n if (mode_is_verified and user_is_verified and user_is_reverified):\r\n template_pdf = \"certificate-template-{id.org}-{id.course}-verified.pdf\".format(id=course_id)\r\n elif (mode_is_verified and not (user_is_verified and user_is_reverified)):\r\n template_pdf = \"certificate-template-{id.org}-{id.course}.pdf\".format(id=course_id)\r\n cert_mode = GeneratedCertificate.MODES.honor\r\n else:\r\n # honor code and audit students\r\n template_pdf = \"certificate-template-{id.org}-{id.course}.pdf\".format(id=course_id)\r\n if forced_grade:\r\n grade['grade'] = forced_grade\r\n\r\n cert, __ = GeneratedCertificate.objects.get_or_create(user=student, course_id=course_id)\r\n\r\n cert.mode = cert_mode\r\n cert.user = student\r\n cert.grade = grade['percent']\r\n cert.course_id = course_id\r\n cert.name = profile_name\r\n # Strip HTML from grade range label\r\n grade_contents = grade.get('grade', None)\r\n try:\r\n grade_contents = lxml.html.fromstring(grade_contents).text_content()\r\n except (TypeError, XMLSyntaxError, ParserError) as e:\r\n # Despite blowing up the xml parser, bad values here are fine\r\n grade_contents = None\r\n\r\n if is_whitelisted or grade_contents is not None:\r\n\r\n # check to see whether the student is on the\r\n # the embargoed country restricted list\r\n # otherwise, put a new certificate request\r\n # on the queue\r\n\r\n if self.restricted.filter(user=student).exists():\r\n new_status = status.restricted\r\n cert.status = new_status\r\n cert.save()\r\n else:\r\n key = make_hashkey(random.random())\r\n cert.key = key\r\n contents = {\r\n 'action': 'create',\r\n 'username': student.username,\r\n 'course_id': course_id.to_deprecated_string(),\r\n 'course_name': course_name,\r\n 'name': profile_name,\r\n 'grade': grade_contents,\r\n 'template_pdf': template_pdf,\r\n }\r\n if template_file:\r\n contents['template_pdf'] = template_file\r\n new_status = status.generating\r\n cert.status = new_status\r\n cert.save()\r\n self._send_to_xqueue(contents, key)\r\n else:\r\n cert_status = status.notpassing\r\n cert.status = cert_status\r\n cert.save()\r\n\r\n return new_status", "def Run(self, args):\n holder = base_classes.ComputeApiHolder(self.ReleaseTrack())\n client = holder.client\n\n 
ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, holder.resources, default_scope=compute_scope.ScopeEnum.GLOBAL)\n\n certificate = files.ReadFileContents(args.certificate)\n private_key = files.ReadFileContents(args.private_key)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n request = client.messages.ComputeRegionSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n region=ssl_certificate_ref.region,\n project=ssl_certificate_ref.project)\n collection = client.apitools_client.regionSslCertificates\n else:\n request = client.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n project=ssl_certificate_ref.project)\n collection = client.apitools_client.sslCertificates\n\n return client.MakeRequests([(collection, 'Insert', request)])", "def __init__(self, enterprise_cert_file_path):\n self._enterprise_cert_file_path = enterprise_cert_file_path\n self._cert = None\n self._sign_callback = None", "async def _create_context(self) -> ssl.SSLContext:\n context = utils.server_context_modern()\n\n await self.cloud.run_executor(\n context.load_cert_chain,\n self._acme.path_fullchain,\n self._acme.path_private_key,\n )\n\n return context", "def get_self_signed_cert(cert_dir):\n if not os.path.exists(cert_dir):\n os.mkdir(cert_dir)\n if not os.path.exists(os.path.join(cert_dir, CERT_FILE)) \\\n or not os.path.exists(os.path.join(cert_dir, KEY_FILE)):\n # create a key pair\n k = crypto.PKey()\n k.generate_key(crypto.TYPE_RSA, 1024)\n\n # create a self-signed cert\n cert = crypto.X509()\n cert.get_subject().C = \"FR\"\n cert.get_subject().L = \".\"\n cert.get_subject().O = \".\"\n cert.get_subject().OU = \".\"\n cert.get_subject().CN = gethostname()\n cert.set_serial_number(1000)\n cert.gmtime_adj_notBefore(0)\n cert.gmtime_adj_notAfter(10 * 365 * 24 * 60 * 60)\n cert.set_issuer(cert.get_subject())\n cert.set_pubkey(k)\n cert.sign(k, \"sha1\")\n\n open(os.path.join(cert_dir, CERT_FILE), \"wt\").write(\n crypto.dump_certificate(crypto.FILETYPE_PEM, cert))\n open(os.path.join(cert_dir, KEY_FILE), \"w+\").write(\n crypto.dump_privatekey(crypto.FILETYPE_PEM, k))\n return (os.path.join(cert_dir, CERT_FILE),\n os.path.join(cert_dir, KEY_FILE))", "def CreateCrtFile(keyfile, csrfile):\n crtfile = tempfile.mkstemp()[1]\n cmd = [\n 'openssl',\n 'x509',\n '-req',\n '-days', '1',\n '-in', csrfile,\n '-signkey', keyfile,\n '-out', crtfile\n ]\n _RunCommand(cmd)\n return crtfile", "def make_cert_for_spki_request(spki_req_b64, serial, ident):\n spki_obj = netscape_spki_from_b64(spki_req_b64)\n if spki_obj is None:\n raise ValueError('Invalid SPKI object')\n\n root_crt = _try_load_ca_cert(cfg.ca_cert_path())\n root_key = _try_load_ca_private_key(cfg.ca_private_key_path())\n crt = _make_base_cert(spki_obj.get_pubkey(), 365, ident, serial)\n crt.set_issuer(root_crt.get_subject())\n crt.sign(root_key, 'sha256')\n return crypto.dump_certificate(crypto.FILETYPE_ASN1, crt)", "def push_ssl_crt():\n logger.info(u\"Pushing SSl Certificates\")\n key = '%(config_folder)s/%(ssl_key)s' % env\n crt = '%(config_folder)s/%(ssl_crt)s' % env\n bundle = '%(config_folder)s/rapidssl_ca_bundle.pem' % env\n logger.info(u\"Using SSL keys and certs at %s and %s\" % (key, 
crt))\n\n # Putting to /tmp and moving for permission purposes\n put(key, '/tmp/_.policystat.com.key')\n sudo('mv /tmp/_.policystat.com.key /etc/ssl/private/_.policystat.com.key')\n sudo('chmod 640 /etc/ssl/private/_.policystat.com.key')\n sudo('chown root:ssl-cert /etc/ssl/private/_.policystat.com.key')\n\n put(crt, '/tmp/_.policystat.com.crt')\n put(bundle, '/tmp/rapidssl_ca_bundle.pem')\n # Combine the crt with the rapidssl intermediate bundle\n sudo('cat /tmp/_.policystat.com.crt /tmp/rapidssl_ca_bundle.pem > \\\n /tmp/_.policystat.com.crt.bundled')\n sudo(\n 'mv /tmp/_.policystat.com.crt.bundled '\n '/etc/ssl/certs/_.policystat.com.crt'\n )\n sudo('chmod 777 /etc/ssl/certs/_.policystat.com.crt')", "def fusion_api_create_certificate_request(self, body, uri=None, api=None, param='', headers=None):\n param = '/certificates/https/certificaterequest'\n return self.ic.post(uri=uri, body=body, api=api, headers=headers, param=param)", "def __init__(__self__, *,\n cert_name: Optional[pulumi.Input[str]] = None,\n thumbprint: Optional[pulumi.Input[str]] = None):\n if cert_name is not None:\n pulumi.set(__self__, \"cert_name\", cert_name)\n if thumbprint is not None:\n pulumi.set(__self__, \"thumbprint\", thumbprint)", "def __init__(__self__,\n resource_name: str,\n args: OriginCaCertificateArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(self):\n self.root_cert = None\n self.persistent_state = dict()", "def test_add_certificate(self):\n response = self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Certificate added successfully')\n assert response.status_code == 201", "def assign(id, type, appid, specialid):\n try:\n client().certificates.assign(id, type, appid, specialid)\n logger.info(\n 'ctl:cert:assign', 'Assigned {0} to {0}'.format(id, appid)\n )\n except Exception as e:\n raise CLIException(str(e))", "def __check_opts(self):\n self.ca_cert_file = os.environ['HOME'] + '/.cat_installer/ca.pem'\n self.pfx_file = os.environ['HOME'] + '/.cat_installer/user.p12'\n if not os.path.isfile(self.ca_cert_file):\n print(Messages.cert_error)\n sys.exit(2)", "def __init__(__self__, *,\n auth_type: pulumi.Input[str],\n certificate: pulumi.Input[str],\n client_id: pulumi.Input[str],\n principal_id: pulumi.Input[str]):\n pulumi.set(__self__, \"auth_type\", 'servicePrincipalCertificate')\n pulumi.set(__self__, \"certificate\", certificate)\n pulumi.set(__self__, \"client_id\", client_id)\n pulumi.set(__self__, \"principal_id\", principal_id)", "def add_cert_and_key(priv_key, cert_list, alias):\n logger.info(\"Writing certificate and private key to filesystem\")\n\n # Determine which directory to store certs in\n if command_exists(\"update-ca-trust\"):\n ca_dir = \"/etc/pki/tls\"\n elif command_exists(\"update-ca-certificates\"):\n ca_dir = \"/etc/ssl\"\n else:\n logger.error(\"Cannot determine certs directory\")\n raise OSError(\n \"OS is missing a required command for CA trust. 
Either update-ca-trust or \"\n \"update-ca-certificates is required.\"\n )\n\n logger.info(\"Using cert directory:\" + ca_dir)\n\n with open(ca_dir + \"/private/\" + alias + \".key\", \"a\") as f:\n f.write(str(priv_key))\n\n for cert in cert_list:\n with open(ca_dir + \"/certs/\" + alias + \".crt\", \"a\") as f:\n f.write(cert)", "def opensslCmsSignedDataCreate( conveyedInfoFile, cert, privateKey ):\n opensslCmdArgs = [ \"openssl\", \"cms\", \"-sign\", \"-in\", conveyedInfoFile,\n \"-signer\", cert,\n \"-inkey\", privateKey,\n \"-outform\", \"der\", \"-nodetach\" ]\n conveyedInfoCmsSignedDerBase64 = runOpensslCmd( opensslCmdArgs, [ \"base64\" ] )\n return conveyedInfoCmsSignedDerBase64", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n certificate_body: Optional[pulumi.Input[str]] = None,\n certificate_chain: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n name_prefix: Optional[pulumi.Input[str]] = None,\n path: Optional[pulumi.Input[str]] = None,\n private_key: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n __props__=None):\n ...", "def load_cert_chain(self, certfile, keyfile: Optional[Any] = ...):\n ...", "def write_cert(filename, content):\r\n with open(filename, 'w') as cert_file:\r\n cert_file.write(content)", "def main():\n try:\n return run_sysca(sys.argv[1:])\n except InvalidCertificate as ex:\n die(str(ex))", "def new_X509(self, host: str) -> Tuple[OpenSSL.crypto.X509, OpenSSL.crypto.PKey]: # pylint: disable=invalid-name\n\n # Generate a new key pair.\n key = new_RSA()\n\n # Generates new X509Request.\n req = OpenSSL.crypto.X509Req()\n req.get_subject().CN = host.encode(\"utf-8\")\n req.set_pubkey(key)\n req.sign(key, \"sha256\")\n\n # Generates new X509 certificate.\n cert = new_X509(common_name=host)\n cert.set_issuer(self.cert.get_subject())\n cert.set_pubkey(req.get_pubkey())\n\n # Sets the certificate 'subjectAltName' extension.\n hosts = [f\"DNS:{host}\"]\n\n if is_ip(host):\n hosts += [f\"IP:{host}\"]\n else:\n hosts += [f\"DNS:*.{host}\"]\n\n hosts = \", \".join(hosts).encode(\"utf-8\")\n cert.add_extensions([OpenSSL.crypto.X509Extension(b\"subjectAltName\", False, hosts)])\n\n # Signs the certificate with the CA's key.\n cert.sign(self.key, \"sha256\")\n\n return cert, key", "def add_cert(session, node_id, cert_id, cert_name,\n cert_location, cert_expiration, username='system_user'):\n session = validate_session(session)\n try:\n add_cert = SslInfo(node_id, cert_id, cert_name,\n cert_location, cert_expiration)\n session.add(add_cert)\n session.commit()\n return add_cert\n except Exception as e:\n session.rollback()", "def fusion_api_generate_certificate_signing_request(self, body, api=None, headers=None):\n return self.wsc.post(body, api=api, headers=headers)", "def req_handler(args):\n key = _get_key(args)\n subject = get_subject_arguments()\n req = create_certificate_request(key, subject=subject, file_name=args.req_out)\n if not args.req_out:\n print(print_certificate_request(req))\n return req", "def sign_certificate(csr):\n unique_filename = str(uuid.uuid4().hex)\n\n file = open(\"./csr_req/%s.csr\" % unique_filename, \"w\")\n file.write(csr.decode(\"utf-8\"))\n file.close()\n\n subprocess.run([\"../ca/scripts/sign.sh\", unique_filename], check=False)\n\n file = open(\"./csr_req/%s.p7b\" % unique_filename, \"r\")\n cert = file.read()\n\n os.remove(\"./csr_req/%s.csr\" % unique_filename)\n os.remove(\"./csr_req/%s.p7b\" % 
unique_filename)\n\n return cert", "def __init__(__self__, *,\n key_vault_cert_name: pulumi.Input[str],\n type: pulumi.Input[str],\n vault_uri: pulumi.Input[str],\n cert_version: Optional[pulumi.Input[str]] = None,\n exclude_private_key: Optional[pulumi.Input[bool]] = None):\n pulumi.set(__self__, \"key_vault_cert_name\", key_vault_cert_name)\n pulumi.set(__self__, \"type\", 'KeyVaultCertificate')\n pulumi.set(__self__, \"vault_uri\", vault_uri)\n if cert_version is not None:\n pulumi.set(__self__, \"cert_version\", cert_version)\n if exclude_private_key is None:\n exclude_private_key = False\n if exclude_private_key is not None:\n pulumi.set(__self__, \"exclude_private_key\", exclude_private_key)", "def __init__(__self__, *,\n certificate_body: pulumi.Input[str],\n private_key: pulumi.Input[str],\n certificate_chain: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n name_prefix: Optional[pulumi.Input[str]] = None,\n path: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):\n pulumi.set(__self__, \"certificate_body\", certificate_body)\n pulumi.set(__self__, \"private_key\", private_key)\n if certificate_chain is not None:\n pulumi.set(__self__, \"certificate_chain\", certificate_chain)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if name_prefix is not None:\n pulumi.set(__self__, \"name_prefix\", name_prefix)\n if path is not None:\n pulumi.set(__self__, \"path\", path)\n if tags is not None:\n pulumi.set(__self__, \"tags\", tags)", "def CreateRequests(self, args):\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, self.resources)\n certificate = file_utils.ReadFile(args.certificate, 'certificate')\n private_key = file_utils.ReadFile(args.private_key, 'private key')\n\n request = self.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=self.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n project=self.project)\n\n return [request]", "def create_key ():", "def regen_cert(self, student, course_id, course=None, forced_grade=None, template_file=None):\r\n # TODO: when del_cert is implemented and plumbed through certificates\r\n # repo also, do a deletion followed by a creation r/t a simple\r\n # recreation. XXX: this leaves orphan cert files laying around in\r\n # AWS. 
See note in the docstring too.\r\n try:\r\n certificate = GeneratedCertificate.objects.get(user=student, course_id=course_id)\r\n certificate.status = status.unavailable\r\n certificate.save()\r\n except GeneratedCertificate.DoesNotExist:\r\n pass\r\n\r\n return self.add_cert(student, course_id, course, forced_grade, template_file)", "def _store_certificate(fullchain, key, domain=None, tag_prefix=None,\n region_name=None, acm_client=None, dry_run=False):\n #pylint:disable=unused-argument\n result = _check_certificate(fullchain, key, domain=domain)\n if not domain:\n domain = result['ssl_certificate']['common_name']\n cert, chain = _split_fullchain(fullchain)\n if not acm_client:\n acm_client = boto3.client('acm', region_name=region_name)\n kwargs = {}\n resp = acm_client.list_certificates()\n for acm_cert in resp['CertificateSummaryList']:\n if acm_cert['DomainName'] == domain:\n LOGGER.info(\"A certificate for domain %s has already been\"\\\n \" imported as %s - replacing\",\n domain, acm_cert['CertificateArn'])\n kwargs['CertificateArn'] = acm_cert['CertificateArn']\n break\n if not dry_run:\n resp = acm_client.import_certificate(\n Certificate=cert.encode('ascii'),\n PrivateKey=key.encode('ascii'),\n CertificateChain=chain.encode('ascii'),\n **kwargs)\n LOGGER.info(\"%s (re-)imported TLS certificate %s as %s\",\n tag_prefix, result['ssl_certificate'], resp['CertificateArn'])\n result.update({'CertificateArn': resp['CertificateArn']})\n return result", "def generate_ssl_object(module, ssl_cafile, ssl_certfile, ssl_keyfile,\n ssl_crlfile=None):\n\n ssl_files = {\n 'cafile': {'path': ssl_cafile, 'is_temp': False},\n 'certfile': {'path': ssl_certfile, 'is_temp': False},\n 'keyfile': {'path': ssl_keyfile, 'is_temp': False},\n 'crlfile': {'path': ssl_crlfile, 'is_temp': False}\n }\n\n for key, value in ssl_files.items():\n if value['path'] is not None:\n # TODO is that condition sufficient?\n if value['path'].startswith(\"-----BEGIN\"):\n # value is a content, need to create a tempfile\n fd, path = tempfile.mkstemp(prefix=key)\n with os.fdopen(fd, 'w') as tmp:\n tmp.write(value['path'])\n ssl_files[key]['path'] = path\n ssl_files[key]['is_temp'] = True\n elif not os.path.exists(os.path.dirname(value['path'])):\n # value is not a content, but path does not exist,\n # fails the module\n module.fail_json(\n msg='\\'%s\\' is not a content and provided path does not '\n 'exist, please check your SSL configuration.' 
% key\n )\n\n return ssl_files", "def update_cert(c, stack_name, domain_name, profile, create=False):\n action = 'create' if create else 'update'\n\n with chdir(WORKING_DIR):\n aws('cloudformation', f'{action}-stack',\n '--stack-name', f'{stack_name}-cert',\n '--template-body', f'file://cert.yaml',\n '--parameters',\n f'ParameterKey=DomainName,ParameterValue={domain_name}',\n f'--profile', f'{profile}')\n # Cert also needs adding to us-east-1 to be used by CloudFront\n aws('cloudformation', f'{action}-stack',\n '--stack-name', f'{stack_name}-cert',\n '--template-body', f'file://cert.yaml',\n '--parameters',\n f'ParameterKey=DomainName,ParameterValue={domain_name}',\n f'--profile', f'{profile}',\n '--region', 'us-east-1')", "def test_add_certificate_keys(self):\n response = self.client.post(\n '/api/v1/certificates', data=json.dumps(certificate_keys),\n content_type='application/json',\n headers=self.get_registrar_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Invalid certificate_name key')\n assert response.status_code == 400", "def get_certinfo(doc):\n\n #set a two second default timeout to recieve a cert\n socket.setdefaulttimeout(2)\n doc['ssl'] = {} \n\n try:\n cert = ssl.get_server_certificate((doc['hostname'], 443))\n #sometimes certs come back as unicode so cast to str() aka ascii\n cert = M2Crypto.X509.load_cert_string(str(cert))\n\n except:\n syslog.syslog('[*] Failed to get ssl certificate from %s' % doc['hostname'])\n print('[*] Failed to get ssl certificate from %s' % doc['hostname'])\n #lets remove the ssl key and return the doc untouched\n doc.pop('ssl')\n return doc\n\n\n #get creation date\n doc['ssl']['created'] = cert.get_not_before().get_datetime().isoformat()\n #get not valid after, aka expiration data\n doc['ssl']['expire'] = cert.get_not_after().get_datetime().isoformat()\n #get issuer information\n doc['ssl']['issuer'] = cert.get_issuer().as_text()\n #get subject information\n doc['ssl']['subject'] = cert.get_subject().as_text()\n #get keysize, size() returns in bytes, so we multiply * 8 to get the number of bits\n doc['ssl']['keysize'] = cert.get_pubkey().size() * 8\n #get cert fingerprint for comparison\n doc['ssl']['fingerprint'] = cert.get_fingerprint()\n\n return doc", "def test_01_basics(self):\n\n self.ac.start()\n\n arg_dict = {\n \"cert\": os.path.join(self.cs_dir, self.get_cli_cert(\"src\")),\n \"key\": os.path.join(self.keys_dir, self.get_cli_key(\"src\")),\n \"dst\": self.durl,\n \"dcert\": os.path.join(self.cs_dir, self.get_cli_cert(\"dst\")),\n \"dkey\": os.path.join(self.keys_dir, self.get_cli_key(\"dst\")),\n \"pkg\": \"[email protected],5.11-0\",\n \"empty\": os.path.join(self.test_root, \"tmp/empty\"),\n \"noexist\": os.path.join(self.test_root, \"octopus\"),\n \"verboten\": self.verboten,\n }\n\n # We need an image for seed_ta_dir() to work.\n # TODO: there might be a cleaner way of doing this\n self.image_create()\n # Add the trust anchor needed to verify the server's identity.\n self.seed_ta_dir(\"ta7\")\n\n # We try to receive a pkg from a secured repo and publish it to\n # another secured repo where both repos require different\n # credentials\n self.pkgrecv(self.surl, \"--key {key} --cert {cert} \"\n \"-d {dst} --dkey {dkey} --dcert {dcert} \"\n \"{pkg}\".format(**arg_dict))\n\n # Now try to use the same credentials for source and dest.\n # This should fail.\n self.pkgrecv(self.surl, \"--key {key} --cert {cert} \"\n \"-d {dst} --dkey {key} --dcert {cert} \"\n \"{pkg}\".format(**arg_dict), 
exit=1)\n\n # Make sure we don't traceback when credential files are invalid\n # Src certificate option missing\n self.pkgrecv(self.surl, \"--key {key} -d {dst} \"\n \"--dkey {dkey} --dcert {dcert} \"\n \"{pkg}\".format(**arg_dict), exit=1)\n\n # Dst certificate option missing\n self.pkgrecv(self.surl, \"--key {key} --cert {cert} \"\n \"-d {dst} --dkey {dkey} \"\n \"{pkg}\".format(**arg_dict), exit=1)\n\n # Src key option missing\n self.pkgrecv(self.surl, \"--cert {cert} \"\n \"-d {dst} --dkey {dkey} --dcert {dcert} \"\n \"{pkg}\".format(**arg_dict), exit=1)\n\n # Dst key option missing\n self.pkgrecv(self.surl, \"--key {key} --cert {cert} \"\n \"-d {dst} --dcert {dcert} \"\n \"{pkg}\".format(**arg_dict), exit=1)\n\n # Src certificate not found\n self.pkgrecv(self.surl, \"--key {key} --cert {noexist} \"\n \"-d {dst} --dkey {dkey} --dcert {dcert} \"\n \"{pkg}\".format(**arg_dict), exit=1)\n\n # Dst certificate not found\n self.pkgrecv(self.surl, \"--key {key} --cert {cert} \"\n \"-d {dst} --dkey {dkey} --dcert {noexist} \"\n \"{pkg}\".format(**arg_dict), exit=1)\n\n # Src key not found\n self.pkgrecv(self.surl, \"--key {noexist} --cert {cert} \"\n \"-d {dst} --dkey {dkey} --dcert {dcert} \"\n \"{pkg}\".format(**arg_dict), exit=1)\n\n # Dst key not found\n self.pkgrecv(self.surl, \"--key {key} --cert {cert} \"\n \"-d {dst} --dkey {noexist} --dcert {dcert} \"\n \"{pkg}\".format(**arg_dict), exit=1)\n\n # Src certificate is empty file\n self.pkgrecv(self.surl, \"--key {key} --cert {empty} \"\n \"-d {dst} --dkey {dkey} --dcert {dcert} \"\n \"{pkg}\".format(**arg_dict), exit=1)\n\n # Dst certificate is empty file\n self.pkgrecv(self.surl, \"--key {key} --cert {cert} \"\n \"-d {dst} --dkey {dkey} --dcert {empty} \"\n \"{pkg}\".format(**arg_dict), exit=1)\n\n # Src key is empty file\n self.pkgrecv(self.surl, \"--key {empty} --cert {cert} \"\n \"-d {dst} --dkey {dkey} --dcert {dcert} \"\n \"{pkg}\".format(**arg_dict), exit=1)\n\n # Dst key is empty file\n self.pkgrecv(self.surl, \"--key {key} --cert {cert} \"\n \"-d {dst} --dkey {empty} --dcert {dcert} \"\n \"{pkg}\".format(**arg_dict), exit=1)\n \n # No permissions to read src certificate \n self.pkgrecv(self.surl, \"--key {key} --cert {verboten} \"\n \"-d {dst} --dkey {dkey} --dcert {dcert} \"\n \"{pkg}\".format(**arg_dict), su_wrap=True, exit=1)\n\n # No permissions to read dst certificate \n self.pkgrecv(self.surl, \"--key {key} --cert {cert} \"\n \"-d {dst} --dkey {dkey} --dcert {verboten} \"\n \"{pkg}\".format(**arg_dict), su_wrap=True, exit=1)\n\n # No permissions to read src key \n self.pkgrecv(self.surl, \"--key {verboten} --cert {cert} \"\n \"-d {dst} --dkey {dkey} --dcert {dcert} \"\n \"{pkg}\".format(**arg_dict), su_wrap=True, exit=1)\n\n # No permissions to read dst key \n self.pkgrecv(self.surl, \"--key {key} --cert {cert} \"\n \"-d {dst} --dkey {verboten} --dcert {dcert} \"\n \"{pkg}\".format(**arg_dict), su_wrap=True, exit=1)", "def generate_selfsigned_ca(clustername):\n\n from datetime import datetime, timedelta\n import ipaddress\n\n from cryptography import x509\n from cryptography.x509.oid import NameOID\n from cryptography.hazmat.primitives import hashes\n from cryptography.hazmat.backends import default_backend\n from cryptography.hazmat.primitives import serialization\n from cryptography.hazmat.primitives.asymmetric import rsa\n \n # Generate key\n key = rsa.generate_private_key(\n public_exponent=65537,\n key_size=2048,\n backend=default_backend(),\n )\n \n name = x509.Name([\n x509.NameAttribute(NameOID.COMMON_NAME, 
unicode(clustername))\n ])\n \n # path_len=1 means that this certificate can sign one level of sub-certs\n basic_contraints = x509.BasicConstraints(ca=True, path_length=1)\n now = datetime.utcnow()\n cert = (\n x509.CertificateBuilder()\n .subject_name(name)\n .issuer_name(name)\n .public_key(key.public_key())\n .serial_number(1)\n .not_valid_before(now)\n .not_valid_after(now + timedelta(days=10*365))\n .add_extension(basic_contraints, False)\n .sign(key, hashes.SHA256(), default_backend())\n )\n\n cert_pem = cert.public_bytes(encoding=serialization.Encoding.PEM)\n\n key_pem = key.private_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PrivateFormat.TraditionalOpenSSL,\n encryption_algorithm=serialization.NoEncryption(),\n )\n\n return cert_pem, key_pem", "def fusion_api_create_appliance_selfsigned_certificate(self, body, api=None, headers=None):\n return self.appliance_certificate.put(body, api, headers)", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n certificate_id: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def test_create_namespaced_pod_security_policy_subject_review(self):\n pass" ]
[ "0.70912915", "0.69870734", "0.6928703", "0.69179356", "0.6904272", "0.68842286", "0.6849011", "0.6816732", "0.67960584", "0.6723379", "0.6536452", "0.65353703", "0.6409508", "0.6319466", "0.6305267", "0.6305267", "0.6248997", "0.6193512", "0.61818844", "0.6146365", "0.61032104", "0.61026937", "0.60947907", "0.6080267", "0.6076016", "0.6064515", "0.60566473", "0.6017872", "0.6013381", "0.5988012", "0.5976463", "0.5943676", "0.59349585", "0.5927679", "0.59244615", "0.59050685", "0.58779085", "0.587552", "0.5802306", "0.5785092", "0.5765889", "0.57615805", "0.5743672", "0.5727532", "0.56922317", "0.5691412", "0.56767625", "0.5673881", "0.5670106", "0.5668221", "0.56666505", "0.5654365", "0.56323195", "0.56252277", "0.56163687", "0.5595348", "0.55948555", "0.55937284", "0.5591011", "0.55821663", "0.5577105", "0.557248", "0.55519545", "0.5549269", "0.5547752", "0.55420226", "0.5520431", "0.5504183", "0.5491508", "0.5488749", "0.5458467", "0.54525757", "0.54421985", "0.5440589", "0.54386735", "0.54311574", "0.54265624", "0.5415797", "0.5378079", "0.5372605", "0.53720886", "0.5368744", "0.536653", "0.5365851", "0.53635436", "0.53587747", "0.53573257", "0.5356716", "0.5354507", "0.5343988", "0.5343669", "0.53429884", "0.53364927", "0.5331627", "0.53254575", "0.5322162", "0.5310237", "0.53066397", "0.529016", "0.5289618" ]
0.6169744
19
New Elliptic Curve key
def new_ec_key(name='secp256r1'):
    if name not in EC_CURVES:
        raise ValueError('Unknown curve')
    return ec.generate_private_key(curve=EC_CURVES[name], backend=get_backend())
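A minimal, self-contained sketch of how this document's function could run. The two names the snippet leaves undefined are assumptions here: EC_CURVES is a hypothetical name-to-curve registry, and get_backend is assumed to wrap cryptography's default backend.

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import ec

# Hypothetical registry; the real mapping is not shown in the snippet.
EC_CURVES = {
    'secp256r1': ec.SECP256R1(),
    'secp384r1': ec.SECP384R1(),
    'secp521r1': ec.SECP521R1(),
}

def get_backend():
    # Assumed to return cryptography's default backend.
    return default_backend()

def new_ec_key(name='secp256r1'):
    if name not in EC_CURVES:
        raise ValueError('Unknown curve')
    return ec.generate_private_key(curve=EC_CURVES[name], backend=get_backend())

# Usage: generate a P-384 key and confirm its curve.
key = new_ec_key('secp384r1')
print(key.curve.name)  # 'secp384r1'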
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate_ecc_private_key(curve_name: str = 'P-256') -> EllipticCurvePrivateKeyWithSerialization:\n curve_obj = {\n 'P-256': ec.SECP256R1(),\n 'P-384': ec.SECP384R1(),\n 'P-521': ec.SECP521R1()\n }[curve_name]\n return ec.generate_private_key(curve_obj, default_backend()) # type: ignore", "def generate_ecc_public_key(private_key: EllipticCurvePrivateKeyWithSerialization) -> EllipticCurvePublicKey:\n return private_key.public_key()", "def generate(cls, params = None, quiet = False):\n\n if params is None:\n if not quiet:\n logger.debug(\"Generating new ECDSA key parameters\")\n params = KeyParams.generateEC()\n\n assert isinstance(params, KeyParams)\n\n if not quiet:\n logger.debug(\"Generating new ECDSA key\")\n\n return cls(POW = rpki.POW.Asymmetric.generateFromParams(params.get_POW()))", "def create_object(self, version, key, **kwds):\n R, x = key\n\n if R is rings.QQ:\n from .ell_rational_field import EllipticCurve_rational_field\n return EllipticCurve_rational_field(x, **kwds)\n elif is_NumberField(R):\n from .ell_number_field import EllipticCurve_number_field\n return EllipticCurve_number_field(R, x)\n elif rings.is_pAdicField(R):\n from .ell_padic_field import EllipticCurve_padic_field\n return EllipticCurve_padic_field(R, x)\n elif is_FiniteField(R) or (is_IntegerModRing(R) and R.characteristic().is_prime()):\n from .ell_finite_field import EllipticCurve_finite_field\n return EllipticCurve_finite_field(R, x)\n elif R in _Fields:\n from .ell_field import EllipticCurve_field\n return EllipticCurve_field(R, x)\n from .ell_generic import EllipticCurve_generic\n return EllipticCurve_generic(R, x)", "def generate(self):\n if self.curvetype == KeyType.ECDSA_P256v1:\n self.private_key_obj = ec.generate_private_key(ec.SECP256R1(), default_backend())\n elif self.curvetype == KeyType.ECDSA_SECP256k1:\n self.private_key_obj = ec.generate_private_key(ec.SECP256K1(), default_backend())\n self.public_key_obj = self.private_key_obj.public_key()\n self._get_naive_private_key_bytes()\n self._get_naive_public_key_bytes()", "def new_key_pair(self):\n from plonevotecryptolib.KeyPair import KeyPair # avoids circular imports\n return KeyPair(self)", "def create_keypair(self):\n # NOTE: currently we rely on zmq for convenience, but we may use libnacl directly\n # if we want to isolate this module from zmq dependency.\n public_key, private_key = zmq.curve_keypair()\n return public_key, private_key", "def generate_private_key(self):\n if not self.curve:\n raise NoCurveError(\"Curve must be set prior to key generation.\")\n return self.load_private_key(SigningKey.generate(curve=self.curve))", "def ec_generate_key(security):\n assert isinstance(security, unicode)\n assert security in _curves\n ec = M2Crypto.EC.gen_params(_curves[security])\n ec.gen_key()\n return ec", "def generateIdentityKeyPair():\n keyPair = Curve.generateKeyPair()\n publicKey = IdentityKey(keyPair.getPublicKey())\n serialized = '0a21056e8936e8367f768a7bba008ade7cf58407bdc7a6aae293e2c' \\\n 'b7c06668dcd7d5e12205011524f0c15467100dd603e0d6020f4d293' \\\n 'edfbcd82129b14a88791ac81365c'\n serialized = binascii.unhexlify(serialized.encode())\n identityKeyPair = IdentityKeyPair(publicKey, keyPair.getPrivateKey())\n return identityKeyPair\n # return IdentityKeyPair(serialized=serialized)", "def new(cls, string=None):\n # Generates warner ECDSA objects\n if string:\n # deterministic private key\n ecdsaPrivkey = SigningKey.from_string(\n string=string, curve=SECP256k1)\n else:\n # random private key\n ecdsaPrivkey = SigningKey.generate(\n 
curve=SECP256k1, entropy=None)\n return cls.fromPrivkey(ecdsaPrivkey)", "def generate_election_key_pair(\n quorum: int, nonce: ElementModQ = None\n) -> ElectionKeyPair:\n polynomial = generate_polynomial(quorum, nonce)\n key_pair = ElGamalKeyPair(\n polynomial.coefficients[0], polynomial.coefficient_commitments[0]\n )\n proof = make_schnorr_proof(key_pair, rand_q())\n return ElectionKeyPair(key_pair, proof, polynomial)", "def generate_signing_keys():\n return SigningKey.generate(curve=SECP256k1)", "def generate_ECDSA_keys(curve=ecdsa.SECP256k1):\n sk = genereate_ECDSA_sk(curve=curve)\n vk = get_pub_key_from_sk(sk, sk_already_formatted=False)\n pk = vk.to_string().hex()\n addr = get_address_from_pub_key(pk)\n return sk, vk, pk, addr", "def new(cls, address):\n signing_key = ecdsa.SigningKey.generate()\n verifying_key = signing_key.get_verifying_key()\n privkey = signing_key.to_string().hex()\n pubkey = verifying_key.to_string().hex()\n return cls(privkey, pubkey, address)", "def ecdsa_key_gen():\n G = EcGroup()\n priv_sign = G.order().random()\n pub_verify = priv_sign * G.generator()\n return (G, priv_sign, pub_verify)", "def __init__(self, key_info):\n if (key_info.type != client_pb2.KeyInfo.ECDSA):\n raise error.UnsupportedAlgorithmError(\n \"Expected ECDSA key, but got key type %d\" % key_info.type)\n\n # Will raise a PemError on invalid encoding\n self.__der, _ = pem.from_pem(key_info.pem_key, self.__READ_MARKERS)\n try:\n self.__key = ecdsa.VerifyingKey.from_der(self.__der)\n except ecdsa.der.UnexpectedDER as e:\n raise error.EncodingError(e)", "def get_new_key() -> rsa.RSAPrivateKeyWithSerialization:\n\n return rsa.generate_private_key(\n backend=default_backend(), public_exponent=65537, key_size=2048\n )", "def _ecdsa_key(self,private_key):\n numbers = private_key.private_numbers()\n content = WriteMessage()\n\n public_key = private_key.public_key()\n serialized = public_key.public_bytes(\n encoding = serialization.Encoding .OpenSSH,\n format = serialization.PublicFormat.OpenSSH)\n\n\n # The SSH agent format somehow combines the elliptic curve's\n # `x` and `y` values (in `numbers.public_numbers`) into a single\n # `Q` value. 
I couldn't figure the specifics out exactly, but\n    # the format is used exactly the same way in the OpenSSH\n    # public key format, so we'll just reuse that one instead.\n\n    pk_data = b64decode(serialized.split(None,2)[1])\n    content.data.extend(pk_data)\n\n    # nist = self._ecdsa_nists[private_key.curve.name]\n    # content.write_string('ecdsa-sha2-{}'.format(nist))\n    # content.write_string(nist)\n    #\n    # buffer = bytearray()\n    # buffer.extend(b'0x04')\n    #\n    # x = numbers.public_numbers.x\n    # y = numbers.public_numbers.y\n    # for number in [x,y]:\n    #     tmp = WriteMessage()\n    #     tmp.write_mpint(number)\n    #     buffer.extend(tmp.data[4:])\n\n    content.write_mpint(numbers.private_value)\n    return content.data", "def gen_private_key(p, q, e):\n\n    # Calculate 'n', n = p x q\n    n = p * q\n    # Calculate 'd', d = e^(-1) mod [(p-1)x(q-1)]\n    phi = (p - 1) * (q - 1)\n    # Need to use extended euclidean algorithm for 'd'\n    gcd, d, b = egcd(e, phi)\n\n    # Assign key parameters\n    key_params = (n, e, d, p, q)\n    # Construct private key\n    key = RSA.construct(key_params)\n\n    return key.exportKey()", "def create_key ():", "def create_key_pair(self) -> Keypair:\n        res = self.context.post(\n            \"/dsum/create_key_pair\", None, None, \"DSum: failed creating a Curve 25519 Keypair\")\n        return Keypair(res['private_key_id'], res['public_key_id'])", "def ecdsaPrivkey(self):\n        return SigningKey.from_string(\n            string=self.rawPrivkey(), curve=SECP256k1)", "def gen_public_key(n, e):\n\n    # Assign key parameters\n    key_params = (n, e)\n    # Construct private key\n    key = RSA.construct(key_params)\n\n    return key.exportKey()", "def generate_ephemeral_key(self, key):\n        error = vscf_error_t()\n        result = self._lib_vscf_ecc.vscf_ecc_generate_ephemeral_key(self.ctx, key.c_impl, error)\n        VscfStatus.handle_status(error.status)\n        instance = VscfImplTag.get_type(result)[0].take_c_ctx(cast(result, POINTER(VscfImplTag.get_type(result)[1])))\n        return instance", "def EllipticCurve_from_j(j, minimal_twist=True):\n    return EllipticCurve(coefficients_from_j(j, minimal_twist))", "def __init__(self, key_bytes, public=True):\n        self.G = _globalECG\n        if public:\n            self.sec = None\n            self.pub = EcPt.from_binary(key_bytes, self.G)\n            self.optim = None\n        else:\n            self.sec = Bn.from_binary(sha256(key_bytes).digest())\n            self.pub = self.sec * self.G.generator()\n            self.optim = do_ecdsa_setup(self.G, self.sec)", "def __init__(self, curve=None, private_key=None, public_key=None):\n        self.curve = curve\n        self.private_key = None\n        self.public_key = None\n        if private_key:\n            self.load_private_key(private_key)\n        if public_key:\n            self.load_received_public_key(public_key)", "def test_private_key_ec(self):\n        priv = \"\"\"-----BEGIN EC PARAMETERS-----\nBggqhkjOPQMBBw==\n-----END EC PARAMETERS-----\n-----BEGIN EC PRIVATE KEY-----\nMHcCAQEEIJZ57L6f6ywtZa7VhsvthAShxjdrL9EIrVwVgxnmD5b3oAoGCCqGSM49\nAwEHoUQDQgAEIg6eBOPv5M2z4ANtsJukbimKWX04lanEdALsbu2xNCDBXJ0IJ4Sd\n3u4G1qvrKX0mBHd7yUPGui+7bvp084mNag==\n-----END EC PRIVATE KEY-----\"\"\"\n        cert = \"\"\"-----BEGIN 
CERTIFICATE-----\nMIIBiTCCAS+gAwIBAgIJAINtiwRC4eBJMAoGCCqGSM49BAMCMCExDzANBgNVBAMM\nBkVDIDI1NjEOMAwGA1UECgwFV2ViQ0EwHhcNMTgwNTI3MTAyNTIyWhcNMTgwNjI2\nMTAyNTIyWjAhMQ8wDQYDVQQDDAZFQyAyNTYxDjAMBgNVBAoMBVdlYkNBMFkwEwYH\nKoZIzj0CAQYIKoZIzj0DAQcDQgAEIg6eBOPv5M2z4ANtsJukbimKWX04lanEdALs\nbu2xNCDBXJ0IJ4Sd3u4G1qvrKX0mBHd7yUPGui+7bvp084mNaqNQME4wHQYDVR0O\nBBYEFEmE51rEUz4TuD8oEAw2lvMfvi6LMB8GA1UdIwQYMBaAFEmE51rEUz4TuD8o\nEAw2lvMfvi6LMAwGA1UdEwQFMAMBAf8wCgYIKoZIzj0EAwIDSAAwRQIgfiKDoHB3\nWzRO1juSMyVBuBw2p1o0ab+3fBNDvff8PXcCIQCUKIyzTnM7Wz6TkABfqOcmx7n4\nsbRvdOg3CepLGW3Ytw==\n-----END CERTIFICATE-----\"\"\"\n pkcs12 = _create_pkcs12(priv, cert)\n self.assertEqual(utils.private_key_type(pkcs12), c.KEY_EC)", "def __init__(self, pubkey, e=65537):\n if isinstance(pubkey, int):\n self.key = RSA.RsaKey(n=pubkey, e=e)\n\n else:\n if not isinstance(pubkey, str):\n raise ValueError('pubkey must be str or int.')\n\n if '----' in pubkey:\n try:\n self.key = RSA.import_key(pubkey)\n except Exception as e:\n print(e)\n else:\n if pubkey == pubkey.lower():\n pubkey = int(pubkey, 16)\n self.key = RSA.RsaKey(n=pubkey, e=e)\n else:\n pubkey = '-----BEGIN PUBLIC KEY-----\\n' + pubkey + '\\n-----END PUBLIC KEY-----'\n try:\n self.key = RSA.import_key(pubkey)\n except Exception as e:\n print(e)", "def curve_from_string(K, ainvs):\n return EllipticCurve(ainvs_from_string(K, ainvs))", "def ed25519_private_key(ctx):\n\n key = ed25519.Ed25519PrivateKey.generate()\n\n ctx.data = str(\n key.private_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PrivateFormat.PKCS8,\n encryption_algorithm=serialization.NoEncryption(),\n ),\n \"utf-8\",\n )", "def from_public_parts(self, x: bytes, y: bytes):\n return asymmetric.ec.EllipticCurvePublicNumbers(\n int.from_bytes(x, 'big'),\n int.from_bytes(y, 'big'),\n asymmetric.ec.SECP256R1()\n ).public_key()", "def mk_keyobj_from_private_key(self, privkey):\n bn = BACKEND_KP.private_key_obj._backend._ffi.NULL\n bn_ptr = BACKEND_KP.private_key_obj._backend._lib.BN_bin2bn(privkey, len(privkey), bn)\n secret_val = BACKEND_KP.private_key_obj._backend._bn_to_int(bn_ptr)\n\n if self.curvetype == KeyType.ECDSA_P256v1:\n self.private_key_obj = ec.derive_private_key(secret_val, ec.SECP256R1(), default_backend())\n elif self.curvetype == KeyType.ECDSA_SECP256k1:\n self.private_key_obj = ec.derive_private_key(secret_val, ec.SECP256K1(), default_backend())\n self._get_naive_private_key_bytes()\n self.public_key_obj = self.private_key_obj.public_key()\n self._get_naive_public_key_bytes()", "def __init__(self, public_key=None):\n self.public_key = self.convert_public_key_to_ecdsa(public_key) if public_key else public_key", "def generate_ecdh_key_pair() -> tuple[X25519PrivateKey, bytes]:\n private_key = X25519PrivateKey.generate()\n public_key_raw = private_key.public_key().public_bytes(\n serialization.Encoding.Raw, serialization.PublicFormat.Raw\n )\n return private_key, public_key_raw", "def __init__(self, p, elliptic_curve, m, q, x_p, y_p, hash_func):\n self.p = p\n if isinstance(elliptic_curve, tuple):\n self.a, self.b = elliptic_curve\n assert(4 * self.a**3 + 27 * self.b**2 % p != 0)\n self.J = 1728 * self.div_mod_p(4 * self.a**3, 4 * self.a**3 + 27 * self.b**2) % p\n else:\n self.J = elliptic_curve\n assert(self.J != 0 and self.J != 1728)\n k = self.div_mod_p(self.J, 1728 - self.J)\n self.a = 3 * k % p\n self.b = 2 * k % p\n self.m = m\n self.q = q\n self.x_p = x_p\n self.y_p = y_p\n self.hash_func = hash_func\n assert(p > 3)\n #assert(isprime(p)) we have no simple methods for checking 
this\n assert(m % q == 0 and m // q >= 1)\n assert(self.mult((x_p, y_p), q) == None)\n assert(2**254 < q < 2**256 or 2**508 < q < 2**512)\n if 2**254 < q < 2**256:\n B = 31\n else:\n B = 131\n for t in range(1, B + 1):\n assert(self.pow_mod_p(p, t, q) != 1)\n assert(m != p)\n assert(self.J != 0 and self.J != 1728)", "def __init__(self, prev_hash: bytes, time: float, user_id: str, password: str):\r\n super().__init__(prev_hash, time, user_id)\r\n\r\n # Derive a public key from the password. Add user_id to password as salt\r\n self.public_key: ec.EllipticCurvePublicKey = Block.password_to_key(password + user_id).public_key()", "def __init__(self, private_key):\n self._sk = ed25519.Ed25519PrivateKey.from_private_bytes(private_key.bytes)", "def generate_keys(self, p, q, e):\n d = EucAlg(p, q)\n for i in d:\n if i == 0:\n raise Exception(\"p and q are not relatively prime.\")\n\n n = p*q\n phi_n = (p-1)*(q-1)\n d = EucAlg(e, phi_n)\n\n self._private_key = (d[0],n)\n self.public_key = (e,n)", "def dh_get_key():\n G = EcGroup()\n priv_dec = G.order().random()\n pub_enc = priv_dec * G.generator()\n return (G, priv_dec, pub_enc)", "def new_key(self, key_name=None):\r\n return self.key_class(self, key_name)", "def __init__(self, public_key):\n self._pk = ed25519.Ed25519PublicKey.from_public_bytes(public_key.bytes)", "def gen_public_key(g, private, p):\n return pow(g, private, p)", "def __init__(self):\n self._keypair = RSA.generate(2048)\n self.public_key = self._keypair.publickey().exportKey()", "def generate_key(seed):\n private_key = sha256(seed)\n public_key = privtopub(private_key)\n return {\"private\": private_key, \"public\": public_key}", "def generateRandomEC(bits = 0, p = 0, primeOrder = False):\n if (p == 0):\n p = next_prime(int(2**bits));\n \n T = GF(p)\n \n coefs = [0, 0, 0, None, None]\n while True:\n #random a_4\n coefs[3] = T.random_element();\n #random a_6\n coefs[4] = T.random_element();\n try:\n E = EllipticCurve(T, coefs)\n if primeOrder == False or is_prime(E.order()):\n break\n except ArithmeticError: #if E singular we try again\n pass\n return E", "def password_to_key(password: str):\r\n curve = ec.SECP256R1() # Elliptic curve\r\n digest = hashes.Hash(hashes.SHA256())\r\n digest.update(password.encode())\r\n password_int = int.from_bytes(digest.finalize(), \"big\")\r\n return ec.derive_private_key(password_int, curve)", "def generate_elgamal_auxiliary_key_pair() -> AuxiliaryKeyPair:\n elgamal_key_pair = elgamal_keypair_random()\n return AuxiliaryKeyPair(\n elgamal_key_pair.secret_key.to_hex(),\n elgamal_key_pair.public_key.to_hex(),\n )", "def convert_public_key_to_ecdsa(self, public_key):\n return PublicKey.fromPem('\\n-----BEGIN PUBLIC KEY-----\\n'+public_key+'\\n-----END PUBLIC KEY-----\\n')", "def new_X509(self, host: str) -> Tuple[OpenSSL.crypto.X509, OpenSSL.crypto.PKey]: # pylint: disable=invalid-name\n\n # Generate a new key pair.\n key = new_RSA()\n\n # Generates new X509Request.\n req = OpenSSL.crypto.X509Req()\n req.get_subject().CN = host.encode(\"utf-8\")\n req.set_pubkey(key)\n req.sign(key, \"sha256\")\n\n # Generates new X509 certificate.\n cert = new_X509(common_name=host)\n cert.set_issuer(self.cert.get_subject())\n cert.set_pubkey(req.get_pubkey())\n\n # Sets the certificate 'subjectAltName' extension.\n hosts = [f\"DNS:{host}\"]\n\n if is_ip(host):\n hosts += [f\"IP:{host}\"]\n else:\n hosts += [f\"DNS:*.{host}\"]\n\n hosts = \", \".join(hosts).encode(\"utf-8\")\n cert.add_extensions([OpenSSL.crypto.X509Extension(b\"subjectAltName\", False, hosts)])\n\n # 
Signs the certificate with the CA's key.\n cert.sign(self.key, \"sha256\")\n\n return cert, key", "def _newKey(self, key):\n pass", "def gen_key(self):\n\n if not self.private_key:\n self._gen_key()\n else:\n raise CryptoError(\"Private Key already existing\")", "def __init__(self, sk=None, n=None, h=None):\r\n if sk:\r\n self.n = sk.n\r\n self.h = sk.h\r\n elif n and h:\r\n self.n = n\r\n self.h = h\r\n else:\r\n raise Exception(\"Public Key construction failed: insufficient/wrong arguments\")\r\n\r\n self.signature_bound = Params[self.n][\"sig_bound\"]\r\n self.sig_bytelen = Params[self.n][\"sig_bytelen\"]", "def EllipticCurve_from_Weierstrass_polynomial(f):\n return EllipticCurve(coefficients_from_Weierstrass_polynomial(f))", "def test_public_key_ec(self):\n cert = \"\"\"-----BEGIN CERTIFICATE-----\nMIIBiTCCAS+gAwIBAgIJAINtiwRC4eBJMAoGCCqGSM49BAMCMCExDzANBgNVBAMM\nBkVDIDI1NjEOMAwGA1UECgwFV2ViQ0EwHhcNMTgwNTI3MTAyNTIyWhcNMTgwNjI2\nMTAyNTIyWjAhMQ8wDQYDVQQDDAZFQyAyNTYxDjAMBgNVBAoMBVdlYkNBMFkwEwYH\nKoZIzj0CAQYIKoZIzj0DAQcDQgAEIg6eBOPv5M2z4ANtsJukbimKWX04lanEdALs\nbu2xNCDBXJ0IJ4Sd3u4G1qvrKX0mBHd7yUPGui+7bvp084mNaqNQME4wHQYDVR0O\nBBYEFEmE51rEUz4TuD8oEAw2lvMfvi6LMB8GA1UdIwQYMBaAFEmE51rEUz4TuD8o\nEAw2lvMfvi6LMAwGA1UdEwQFMAMBAf8wCgYIKoZIzj0EAwIDSAAwRQIgfiKDoHB3\nWzRO1juSMyVBuBw2p1o0ab+3fBNDvff8PXcCIQCUKIyzTnM7Wz6TkABfqOcmx7n4\nsbRvdOg3CepLGW3Ytw==\n-----END CERTIFICATE-----\"\"\"\n x509 = crypto.load_certificate(PEM, cert)\n self.assertEqual(utils.public_key_type(x509), c.KEY_EC)", "def create_new_enclave(cls) :\n\n nonce = '{0:016X}'.format(random.getrandbits(64))\n\n try :\n enclave_data = pdo_enclave.create_enclave_info(nonce)\n except :\n raise Exception('failed to create enclave data')\n\n enclave_info = dict()\n enclave_info['nonce'] = nonce\n enclave_info['sealed_data'] = enclave_data.sealed_enclave_data\n enclave_info['verifying_key'] = enclave_data.verifying_key\n enclave_info['encryption_key'] = enclave_data.encryption_key\n enclave_info['enclave_id'] = enclave_data.verifying_key\n enclave_info['proof_data'] = ''\n if not pdo_enclave.enclave.is_sgx_simulator() :\n enclave_info['proof_data'] = enclave_data.proof_data\n\n return cls(enclave_info)", "def do_new(argv):\n\n global PRIVATE_KEY\n\n if not PRIVATE_KEY:\n PRIVATE_KEY = wallet.get_private_key()\n else:\n get_new = yes_or_no(\"Private key already exist, do you want generate new one ?\")\n if get_new:\n PRIVATE_KEY = wallet.get_private_key()\n print(\"Private Key: '\" + PRIVATE_KEY + \"'\")\n cmpr_pub_key = wallet.get_compressed_public_key(PRIVATE_KEY, 1)\n addr = wallet.public_key_to_address(cmpr_pub_key, 0)\n open(\"data/address\", \"w\").write(addr)\n print(\"Public key was saved to 'data/cmpr_pub_key'\")", "def generate_keys() -> tuple:\n private_key = ecdsa.SigningKey.generate(curve=curve)\n public_key = private_key.get_verifying_key()\n\n private_key = encode_private_key(private_key)\n public_key = encode_public_key(public_key)\n\n return public_key, private_key", "def gen_private_key():\n return DH.b2i(Random.new().read(DH_SIZE))", "def derive_public_key(private_key):\r\n\r\n Q = int.from_bytes(private_key, byteorder='big') * BIP32_CURVE.generator\r\n xstr = Q.x().to_bytes(32, byteorder='big')\r\n parity = Q.y() & 1\r\n return (2 + parity).to_bytes(1, byteorder='big') + xstr", "def randomCurve(p):\n x, y = randrange(1, p), randrange(1, p)\n a = randrange(1, p)\n b = (y ** 2 - x ** 3 - a * x) % p\n field = FiniteField(p)\n curve = EllipticCurve(field, a, b)\n point = curve(x, y)\n return curve, point", "def 
get_private_key(self, address58: str) -> 'EllipticCurvePrivateKey':\n        return self.keys[address58]", "def generate(cls, keylength = 2048, quiet = False):\n\n    if not quiet:\n      logger.debug(\"Generating new %d-bit RSA key\", keylength)\n    if generate_insecure_debug_only_rsa_key is not None:\n      return cls(POW = generate_insecure_debug_only_rsa_key())\n    else:\n      return cls(POW = rpki.POW.Asymmetric.generateRSA(keylength))", "def generate_key(self):\n\n        self.key = Fernet.generate_key()\n        self.cryptor = Fernet(self.key)", "def get_key_pair() -> typing.Tuple[bytes, bytes]: \n    return _get_key_pair_from_sk(ecdsa.SigningKey.generate(curve=CURVE))", "def EllipticCurve_from_plane_curve(C, P):\n    from sage.misc.superseded import deprecation\n    deprecation(3416, 'use Jacobian(C) instead')\n    # Note: this function never used the rational point\n    from sage.schemes.elliptic_curves.jacobian import Jacobian\n    return Jacobian(C)", "def __init__(self, key):\n        self.bs = 16\n        self.key = hashlib.sha256(key.encode()).digest()", "def generate_key(self, alg_id):\n        error = vscf_error_t()\n        result = self._lib_vscf_ecc.vscf_ecc_generate_key(self.ctx, alg_id, error)\n        VscfStatus.handle_status(error.status)\n        instance = VscfImplTag.get_type(result)[0].take_c_ctx(cast(result, POINTER(VscfImplTag.get_type(result)[1])))\n        return instance", "def curve_from_strings(K, ainv_string_list):\n    return EllipticCurve(ainvs_from_strings(K, ainv_string_list))", "def make_public_key(prime, base, rnumber):\n\n    pub_key = (base ** rnumber) % prime\n    return pub_key", "def verify_curve(curve):\n    # What follows is the implementation of the verification algorithm\n    # described in \"The Elliptic Curve Digital Signature Algorithm (ECDSA)\",\n    # from Certicom. There are just a few differences between the original algorithm\n    # and the implementation:\n    #\n    # * a few variable names have been changed for the sake of clarity;\n    # * the document from Certicom allows arbitrary seeds with bit length\n    #   >= 160; here we only care about seeds that are exactly 160-bit long.\n\n    if curve.seed.bit_length() > 160:\n        raise VerificationFailed('seed too long')\n\n    seed_bytes = curve.seed.to_bytes(length=160 // 8, byteorder='big')\n\n    # Define t, s and v as specified on the document.\n    t = curve.p.bit_length()\n    s = (t - 1) // 160\n    v = t - 160 * s\n\n    # 1. Compute h = SHA-1(seed_bytes) and let c0 denote the bit string of\n    # length v bits obtained by taking the v rightmost bits of h.\n    h = hashlib.sha1(seed_bytes).digest()\n    h = int.from_bytes(h, byteorder='big')\n\n    c0 = h & ((1 << v) - 1)\n\n    # 2. Let w[0] denote the bit string of length v bits obtained by setting\n    # the leftmost bit of c0 to 0.\n    #\n    # Note: here we use 160 bit instead of v bits, as required by the document.\n    # We do so to make the code easier, and because it does not make any\n    # difference (see the step 6).\n    w0 = c0 & ((1 << v - 1) - 1)\n    w = [w0.to_bytes(length=160 // 8, byteorder='big')]\n\n    # 3. Let z be the integer whose binary expansion is given by 160-bit string\n    # seed_bytes.\n    z = curve.seed\n\n    # 4. For i from 1 to s do:\n    for i in range(1, s + 1):\n        # 4.1 Let s_i be 160-bit string which is the binary expansion of the\n        # integer (z + i) % (2 ** g).\n        z_i = ((z + i) % (2 ** 160))\n        s_i = z_i.to_bytes(length=160 // 8, byteorder='big')\n\n        # 4.2 Compute w_i = SHA-1(s_i).\n        w_i = hashlib.sha1(s_i).digest()\n        w.append(w_i)\n\n    # 5. Let w be the bit string obtained by concatenating w_0,w_1,...,w_s.\n    w = b''.join(w)\n\n    # 6. 
Let c be the integer whose binary expansion is given by w.\n    #\n    # On step 2, we said that we used a longer bit length for the first element\n    # of w. This is correct because the resulting c does not change: using 160\n    # bits instead of v bits is equivalent to adding some zeroes to the left of c.\n    c = int.from_bytes(w, 'big')\n\n    # If b ** 2 * c == a ** 3 (mod p) then accept; otherwise reject.\n    if (curve.b * curve.b * c - curve.a * curve.a * curve.a) % curve.p != 0:\n        raise VerificationFailed('curve verification failed')", "def generate(self, module):\n\n        # If size is wrong, delete the key. A new key will be generated in the next step.\n        if self.key_current_size != self.size and not self.ignore_size:\n            self.remove()\n            self.key_exists = False\n        else:\n            self.changed = False\n\n        # If there is no key or user has set \"force\"\n        if not self.key_exists or self.force:\n            if self.type == \"RSA\":\n                self.key = crypto_rsa.generate_private_key(public_exponent=65537, key_size=self.size, backend=crypto_default_backend())\n            elif self.type == \"DSA\":\n                self.key = crypto_dsa.generate_private_key(key_size=self.size, backend=crypto_default_backend())\n            elif self.type == \"ECDSA\":\n                if self.size == 256:\n                    self.curve = crypto_ec.SECP256R1()\n                elif self.size == 384:\n                    self.curve = crypto_ec.SECP384R1()\n                elif self.size == 521:\n                    self.curve = crypto_ec.SECP521R1()\n                self.key = crypto_ec.generate_private_key(curve=self.curve, backend=crypto_default_backend())\n            elif self.type == \"ED25519\":\n                self.size = 128\n                self.curve = \"EC25519\"\n            else:\n                raise HostkeyError(\"Unknown key type.\")\n\n            if self.type != \"ED25519\":\n                self.privkey = self.key.private_bytes(crypto_serialization.Encoding.PEM, crypto_serialization.PrivateFormat.PKCS8, crypto_serialization.NoEncryption())\n                self.pubkey = self.key.public_key().public_bytes(crypto_serialization.Encoding.OpenSSH, crypto_serialization.PublicFormat.OpenSSH)\n\n                try:\n                    privfile = os.open(self.fullpath, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, self.mode)\n                    os.write(privfile, self.privkey)\n                    os.close(privfile)\n                    pubfile = os.open(self.fullpath + \".pub\", os.O_WRONLY | os.O_CREAT | os.O_TRUNC, self.mode)\n                    os.write(pubfile, self.pubkey)\n                    os.close(pubfile)\n                    self.changed = True\n                except IOError:\n                    self.remove()\n                    raise HostkeyError(get_exception())\n            else:\n                # use ssh-keygen to generate ED25519 Hostkeys\n                # Keyfile must not exist, as there is no \"force-overwrite\" in ssh-keygen\n                self.remove()\n                retcode = subprocess.call([\"ssh-keygen\", \"-q\", \"-t\", \"ed25519\", \"-N\", '', \"-f\", self.fullpath])\n                self.changed = True\n        else:\n            self.changed = False\n\n        file_args = module.load_file_common_arguments(module.params)\n        file_args['path'] = self.fullpath\n        if module.set_fs_attributes_if_different(file_args, False):\n            self.changed = True\n        file_args['path'] = self.fullpath + \".pub\"\n        file_args['mode'] = self.pubmode\n        if module.set_fs_attributes_if_different(file_args, False):\n            self.changed = True", "def __init__(self, key: bytearray):\n        self.__key = key\n        self.__KSA(bytearray([i for i in range(256)]))", "def private_key(self):", "def test_set_tmp_ecdh(self):\n        context = Context(SSLv23_METHOD)\n        for curve in get_elliptic_curves():\n            if curve.name.startswith(\"Oakley-\"):\n                # Setting Oakley-EC2N-4 and Oakley-EC2N-3 adds\n                # ('bignum routines', 'BN_mod_inverse', 'no inverse') to the\n                # error queue on OpenSSL 1.0.2.\n                continue\n            # The only easily \"assertable\" thing is that it does not raise an\n            # exception.\n            context.set_tmp_ecdh(curve)", "def private_key():\n    return 
\"Toholampi summer festival 2017 has the most harcore rock bands\"", "def _create_pkey(self, commonname, serial):\n pkey = PKey()\n pkey.generate_key(crypto.TYPE_RSA, self.key_bits)\n private = crypto.dump_privatekey(crypto.FILETYPE_PEM,\n pkey).decode()\n key_path = self._get_key_path(commonname, serial)\n if os.path.exists(key_path):\n raise FileExistsError(key_path)\n with open(key_path, 'w') as private_file:\n private_file.writelines(private)\n\n key_link = self._get_key_link(commonname)\n if os.path.exists(key_link):\n os.unlink(key_link)\n os.symlink(os.path.basename(key_path), key_link)\n\n return pkey", "def make_final_key(prime, public, private):\n\n key = (public ** private) % prime\n return key", "def _slowmath_rsa_construct(n=None, e=None, d=None, p=None, q=None, u=None):\r\n#def _slowmath_rsa_construct(n, e, d=None, p=None, q=None, u=None):\r\n # Anthony - changed to allow user to create a private key\r\n # without the public keys\r\n #assert isinstance(n, long)\r\n #assert isinstance(e, long)\r\n # Anthony - modified May 9 to allow type int for each arguement.\r\n assert isinstance(n, (int, long, type(None)))\r\n assert isinstance(e, (int, long, type(None)))\r\n assert isinstance(d, (int, long, type(None)))\r\n assert isinstance(p, (int, long, type(None)))\r\n assert isinstance(q, (int, long, type(None)))\r\n assert isinstance(u, (int, long, type(None)))\r\n obj = _slowmath_RSAKey()\r\n # Anthony - changed to allow user to create a private key\r\n # without the public keys\r\n #obj.n = n\r\n #obj.e = e\r\n if n is not None: obj.n = n\r\n if e is not None: obj.e = e\r\n if d is not None: obj.d = d\r\n if p is not None: obj.p = p\r\n if q is not None: obj.q = q\r\n if u is not None: obj.u = u\r\n return obj", "def generate_key(self):\n key = rsa.generate_private_key(\n public_exponent=self.settings['key_public_exponent_size'],\n key_size=self.settings['key_size'],\n backend=default_backend()\n )\n return key", "def create_key() -> RSA.RsaKey:\n\n return RSA.generate(1024, Crypto.Random.new().read)", "def generate_aes_key ( ) :\n import hashlib\n sr = Crypto.Random.random.StrongRandom( )\n key_bits = sr.getrandbits( 256 )\n sha_key = hashlib.sha256( str( key_bits ) ).digest( )\n return sha_key", "def __init__(self, p=P_NIST, g=G_NIST):\n dh_a = self._random_int(p)\n dh_A = gmp.powmod(g, dh_a, p)\n\n self._dh_p = p\n self._dh_g = g\n self._secret_key = dh_a\n self._public_key = dh_A", "def parse_key(raw_key):\n raw_key_bytes = raw_key.encode('ascii')\n try:\n validate_cmek(raw_key)\n key_type = KeyType.CMEK\n sha256 = None\n except errors.Error:\n if len(raw_key) != 44:\n raise\n key_type = KeyType.CSEK\n sha256 = hash_util.get_base64_hash_digest_string(\n hashlib.sha256(base64.b64decode(raw_key_bytes)))\n return EncryptionKey(key=raw_key, sha256=sha256, type=key_type)", "def new_public_key(self):\n\n option = 'new_public_key'\n _file = self.__get_option(option)\n\n if _file and not os.path.exists(_file) and not os.path.isfile(_file):\n self.log.error(\"Paramenter '%s' points to non-existing file '%s')\" % \\\n (option, _file))\n raise ConfigError('File Error', \"Paramenter '%s' points to non-existing file '%s')\" % \\\n (option, _file))\n else:\n return None", "def get_key(key):\n encrypt_key = pow(key, e, n)\n return encrypt_key", "def Generate(size=keyinfo.RSA_PRIV.default_size):\n key = RSA.generate(size, util.RandBytes)\n #NOTE: PyCrypto stores p < q, u = p^{-1} mod q\n #But OpenSSL and PKCS8 stores q < p, invq = q^{-1} mod p\n #So we have to reverse the p and q values\n 
params = { 'privateExponent': util.PadBytes(util.BigIntToBytes(key.d), 1),\n 'primeP': util.PadBytes(util.BigIntToBytes(key.q), 1),\n 'primeQ': util.PadBytes(util.BigIntToBytes(key.p), 1),\n 'primeExponentP': util.PadBytes(util.BigIntToBytes(key.d % (key.q - 1)), 1),\n 'primeExponentQ': util.PadBytes(util.BigIntToBytes(key.d % (key.p - 1)), 1),\n 'crtCoefficient': util.PadBytes(util.BigIntToBytes(key.u), 1)}\n pubkey = key.publickey()\n pub_params = { 'modulus': util.PadBytes(util.BigIntToBytes(key.n), 1),\n 'publicExponent': util.PadBytes(util.BigIntToBytes(key.e), 1)}\n pub = RsaPublicKey(pub_params, pubkey, size)\n return RsaPrivateKey(params, pub, key, size)", "def cred_init(self, rp_id: str) -> (bytes, ec.EllipticCurvePrivateKey):\n\n rp_id_hash = sha256(rp_id.encode())\n private_key = ec.generate_private_key(ec.SECP256R1(), default_backend())\n\n # This may look like a cryptography but it's not.\n # Never try to use it in a real project.\n aesgcm = AESGCM(self.master_key)\n data = private_key.private_numbers().private_value.to_bytes(32, 'big')\n aad = rp_id_hash\n nonce = sha256(aad + self.master_key)[4:16]\n credential_id = aesgcm.encrypt(nonce, data, aad)\n\n return credential_id, private_key", "def __init__(self, private_key):\n if private_key:\n if isinstance(private_key, str): # base58 encoded string\n self.private_key = PrivateKey.from_b58check(private_key)\n else:\n self.private_key = private_key\n self.public_key = self.private_key.public_key\n else:\n self.private_key = None\n self.public_key = None", "def getG(compressed=True):\n priv = \"\\x00\"*31 + \"\\x01\"\n G = secp256k1.PrivateKey(priv, ctx=ctx).pubkey.serialize(compressed)\n return G", "def new_extension(name, value, critical=0, _pyfree=1):\n if name == 'subjectKeyIdentifier' and \\\n value.strip('0123456789abcdefABCDEF:') is not '':\n raise ValueError('value must be precomputed hash')\n lhash = m2.x509v3_lhash()\n ctx = m2.x509v3_set_conf_lhash(lhash)\n x509_ext_ptr = m2.x509v3_ext_conf(lhash, ctx, name, value)\n x509_ext = X509_Extension(x509_ext_ptr, _pyfree)\n x509_ext.set_critical(critical)\n return x509_ext", "def generate_key():\n key = crypto.Key.generate_key()\n click.echo('Private Key (len {}):: \\n{}'.format(\n len(key.get_privkey()),\n hexlify(key.get_privkey())))\n click.echo('Public Key (len {})::\\n{}'.format(\n len(key.get_pubkey()),\n hexlify(key.get_pubkey())))", "def __init__(self, gen_priv_key: bool = False, priv_key_path: str = None):\n self.priv_key = None\n self.pub_key = None\n\n # max size = (bytes(rsa) - 2 * bytes(hash) - 2),\n # currently hard-coded to 190 = 256 - 2 * 32 - 2\n self.max_encrypt_size = 190\n\n if gen_priv_key:\n self.priv_key = RSA.generate(2048)\n if priv_key_path is not None:\n path = pathlib.Path(priv_key_path)\n with open(path.as_posix(), 'w') as f:\n f.write(self.priv_key.export_key().decode('utf-8'))\n elif priv_key_path is not None:\n path = pathlib.Path(priv_key_path)\n if path.is_file():\n self.priv_key = RSA.importKey(open(path.as_posix()).read())\n else:\n raise Exception(\"Failed to open file {}\".format(path.as_posix))\n\n if self.priv_key is not None:\n self.pub_key = self.priv_key.publickey()\n\n # delegate encrypt/decrypt function\n self.cipher = PKCS1_OAEP.new(self.priv_key, hashAlgo=SHA256)\n self.decrypt = self.cipher.decrypt", "def save_ecc_public_key(ec_public_key: EllipticCurvePublicKey, file_path: str,\n encoding: Encoding = Encoding.PEM) -> None:\n pem_data = ec_public_key.public_bytes(encoding=encoding, 
format=serialization.PublicFormat.SubjectPublicKeyInfo)\n with open(file_path, 'wb') as f:\n f.write(pem_data)", "def save_ecc_private_key(ec_private_key: EllipticCurvePrivateKeyWithSerialization, file_path: str,\n password: str = None,\n encoding: Encoding = Encoding.PEM) -> None:\n serialized_private = ec_private_key.private_bytes(encoding=encoding,\n format=serialization.PrivateFormat.PKCS8,\n encryption_algorithm=serialization.BestAvailableEncryption\n (password.encode('utf-8')) if password\n else serialization.NoEncryption())\n with open(file_path, 'wb') as f:\n f.write(serialized_private)", "def generate_key(domain_name):\n key = rsa.generate_private_key(\n public_exponent=65537,\n key_size=2048,\n backend=default_backend()\n )\n\n #storing client's private key\n with open(domain_name + \".key\", \"wb\") as f:\n f.write(key.private_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PrivateFormat.TraditionalOpenSSL,\n encryption_algorithm=serialization.NoEncryption(),\n ))\n\n return key", "def test_ec(self):\n key = c.KEY_EC\n usage = [\n c.KU_DIGITALSIGNATURE,\n c.KU_NONREPUDIATION,\n c.KU_KEYAGREEMENT,\n c.KU_ENCIPHERONLY,\n c.KU_DECIPHERONLY,\n ]\n self.assertTrue(utils.check_key_usage(key, usage))", "def createKeyPair(type, bits):\n pkey = crypto.PKey()\n pkey.generate_key(type, bits)\n return pkey", "def gen_key_pair():\n sk = gen_secret_key(BITCOIN.gen.n)\n pk = PublicKey.from_sk(sk)\n return sk, pk" ]
[ "0.7240443", "0.702215", "0.69358593", "0.69095886", "0.6824323", "0.644343", "0.6409015", "0.63836247", "0.6371176", "0.63416773", "0.6324077", "0.62912446", "0.6263568", "0.62155515", "0.6212738", "0.6199788", "0.6176039", "0.6160097", "0.6147054", "0.6129228", "0.61161697", "0.60951656", "0.60923266", "0.60830665", "0.6074731", "0.6056661", "0.60160357", "0.5999179", "0.59897184", "0.5974682", "0.59667325", "0.59414756", "0.59190094", "0.5900077", "0.58995265", "0.5892305", "0.58850515", "0.5883603", "0.58785087", "0.58549774", "0.58163226", "0.58070636", "0.5806327", "0.5743145", "0.5737539", "0.5724559", "0.5717235", "0.57059294", "0.56913596", "0.56660116", "0.56621575", "0.56600666", "0.5655176", "0.56418043", "0.56405914", "0.56316245", "0.56014156", "0.55992293", "0.55965877", "0.55923104", "0.55763584", "0.5561077", "0.556057", "0.55346173", "0.55178535", "0.5512225", "0.5498821", "0.5496769", "0.54875934", "0.5486651", "0.5474518", "0.54734236", "0.5448508", "0.5445702", "0.54347694", "0.5429044", "0.5425769", "0.54218423", "0.5421276", "0.5421093", "0.5407881", "0.53892994", "0.5387459", "0.5384482", "0.53809166", "0.5375729", "0.53737795", "0.5372805", "0.5371968", "0.5369793", "0.53688705", "0.53666526", "0.5364669", "0.53562874", "0.5348925", "0.53485346", "0.534704", "0.5339571", "0.5337428", "0.5336" ]
0.78493464
0
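
Editorial note on the record that ends above: its negatives are drawn from elliptic-curve and RSA key-generation code. A minimal runnable sketch, assuming the `cryptography` package (3.x or later, where no explicit backend argument is needed), of the pattern the first negatives illustrate: generate an EC private key on an assumed P-256 curve, then serialize its public half to PEM.

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ec

# Generate a P-256 private key (curve choice is an illustrative assumption).
private_key = ec.generate_private_key(ec.SECP256R1())
public_key = private_key.public_key()

# Serialize the public key as SubjectPublicKeyInfo PEM.
pem = public_key.public_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PublicFormat.SubjectPublicKeyInfo,
)
print(pem.decode())

The same public_bytes/private_bytes calls recur throughout the PEM-serialization records that follow.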
Serialize key in PEM format, optionally encrypted.
def key_to_pem(key, password=None):
    if password:
        enc = BestAvailableEncryption(as_bytes(password))
    else:
        enc = NoEncryption()
    return key.private_bytes(Encoding.PEM, PrivateFormat.PKCS8, enc)
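
A usage sketch for key_to_pem above; the function is repeated so the snippet runs standalone, and as_bytes — not shown in the record — is given an assumed str-to-bytes stand-in. Assumes a recent `cryptography` release where generate_private_key needs no backend argument.

from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives.serialization import (
    BestAvailableEncryption,
    Encoding,
    NoEncryption,
    PrivateFormat,
)

def as_bytes(value):
    # Assumed stand-in for the helper key_to_pem references.
    return value.encode('utf-8') if isinstance(value, str) else value

def key_to_pem(key, password=None):
    if password:
        enc = BestAvailableEncryption(as_bytes(password))
    else:
        enc = NoEncryption()
    return key.private_bytes(Encoding.PEM, PrivateFormat.PKCS8, enc)

key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
pem = key_to_pem(key, password='s3cret')
print(pem.splitlines()[0])  # b'-----BEGIN ENCRYPTED PRIVATE KEY-----'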
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def encode_key(self, key):\n return key.private_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PrivateFormat.TraditionalOpenSSL,\n encryption_algorithm=serialization.NoEncryption(),\n ).decode(encoding='UTF-8')", "def serialize_key(key: str) -> bytes:\n return key.encode(\"utf-8\")", "def serializePrivateKey(private_key):\n\treturn private_key.private_bytes(\n\t\tencoding=serialization.Encoding.PEM,\n\t\tformat=serialization.PrivateFormat.PKCS8,\n\t\tencryption_algorithm=serialization.NoEncryption()\n\t)", "def jwt_key_to_pem(self, key_json_dict):\n pub_key = RSAAlgorithm.from_jwk(json.dumps(key_json_dict))\n return pub_key.public_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PublicFormat.SubjectPublicKeyInfo,\n )", "def get_private_key_in_pem(self):\n serialized_private = self.private_key_obj.private_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PrivateFormat.TraditionalOpenSSL,\n encryption_algorithm=serialization.NoEncryption()\n )\n return serialized_private", "def get_public_key_in_pem(self):\n serialized_public = self.public_key_obj.public_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PublicFormat.SubjectPublicKeyInfo\n )\n return serialized_public", "def serializePublicKey(public_key):\n\treturn public_key.public_bytes(\n\t\tencoding=serialization.Encoding.PEM,\n\t\tformat=serialization.PublicFormat.SubjectPublicKeyInfo\n\t)", "def write_key(self):\n\t key = Fernet.generate_key()\n\t with open(\"key.key\", \"wb\") as key_file:\n\t key_file.write(key)", "def serialize_encrypted_data_key(encrypted_data_key):\n encrypted_data_key_format = (\n \">\" # big endian\n \"H\" # key provider ID length\n \"{provider_id_len}s\" # key provider ID\n \"H\" # key info length\n \"{provider_info_len}s\" # key info\n \"H\" # encrypted data key length\n \"{enc_data_key_len}s\" # encrypted data key\n )\n return struct.pack(\n encrypted_data_key_format.format(\n provider_id_len=len(encrypted_data_key.key_provider.provider_id),\n provider_info_len=len(encrypted_data_key.key_provider.key_info),\n enc_data_key_len=len(encrypted_data_key.encrypted_data_key),\n ),\n len(encrypted_data_key.key_provider.provider_id),\n to_bytes(encrypted_data_key.key_provider.provider_id),\n len(encrypted_data_key.key_provider.key_info),\n to_bytes(encrypted_data_key.key_provider.key_info),\n len(encrypted_data_key.encrypted_data_key),\n encrypted_data_key.encrypted_data_key,\n )", "def convert_key_to_pem ( key_filename, output_filename ) :\n cmd = 'openssl rsa -in ' + key_filename + ' -outform PEM -out ' + output_filename\n return subprocess.call( cmd, shell = True )", "def raw(self) -> bytes:\n return bytes(self._signing_key)", "def serialize_key(key) -> str:\n if not isinstance(key, str):\n key = repr(key)\n return key", "def save(self, save_dir):\n path = os.path.join(save_dir, self.name + \".pem\")\n with open(path, \"wb\") as f:\n f.write(self.public_key)", "def write_key():\n key = fernet.Fernet.generate_key()\n keyfile = open(KEY_PATH,'wb')\n keyfile.write(key)\n keyfile.close()", "def parse_key(key: RSA.RsaKey) -> str:\n\n return binascii.hexlify(key.exportKey(\n format='DER')).decode('ascii')", "def convert_key_to_string(key):\n\n return key.encode(encoder=nacl.encoding.Base64Encoder).decode('utf-8')", "def serialize(self, data):\n assert self._key is not None\n assert self._cert is not None\n try:\n data = self._serialize(data)\n signature = b64encode(self._key.sign(data, self._digest))\n signer = self._cert.get_id()\n return 
self._serialize(dict(data=data,\n signer=signer,\n signature=signature))\n except Exception, exc:\n raise SecurityError(\"Unable to serialize: %r\" % (exc, ))", "def encryption_key(self) -> typing.Optional[aws_cdk.aws_kms.IKey]:\n ...", "def encryption_key(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"encryption_key\")", "def get_private_key_in_der(self):\n serialized_private = self.private_key_obj.private_bytes(\n encoding=serialization.Encoding.DER,\n format=serialization.PrivateFormat.TraditionalOpenSSL,\n encryption_algorithm=serialization.NoEncryption()\n )\n return serialized_private", "def privateKey2Text(key:RSA.RsaKey):\n return b58encode(key.exportKey('DER'))", "def serialized_private_key(self):\n if self._serialized_private_key is not None:\n return self._serialized_private_key\n\n location = self.settings.Location\n if location.AttachmentName:\n self._serialized_private_key = self.binaries[location.AttachmentName.text].content\n return self._serialized_private_key\n else:\n with open(location.FileName.text, 'rb') as file:\n self._serialized_private_key = file.read()\n return self._serialized_private_key", "def _dumpKey(self, key):\n return self.serializer.dumpKey((self.path, self._internalNs, key))", "def raw_key(self) -> bytes:\n return bytes(self.data_bytes[ProofPath._Positions.KEY_POS : ProofPath._Positions.KEY_POS + KEY_SIZE])", "def _serializeKey(entityId, key):\n Identifier.checkIdentifier(key)\n return \"%s\\x1D%s\" % (entityId, key)", "def get_pub_key(self):\n return \"RSA {0}\".format(self._cert.get_pubkey().bits)", "def get_key(self):\r\n return self.__encryption_key", "def _get_encryption_key(self, **options):\n\n return self._public_key", "def write_key(key_name):\n key = Fernet.generate_key()\n with open(key_name, \"wb\") as key_file:\n key_file.write(key)", "def get_public_key_in_der(self):\n serialized_public = self.public_key_obj.public_bytes(\n encoding=serialization.Encoding.DER,\n format=serialization.PublicFormat.SubjectPublicKeyInfo\n )\n return serialized_public", "def serializeParameters(parameters):\n\treturn parameters.parameter_bytes(\n\t\tencoding=serialization.Encoding.PEM,\n\t\tformat=serialization.ParameterFormat.PKCS3\n\t)", "async def client_public_key(self) -> bytes:\n raise NotImplementedError", "def encrypt_data(self, params):\n from django.core.signing import dumps\n return dumps(params, salt=self.salt_namespace)", "def key_to_struct(key: RsaKey) -> bytes:\n mod = int_to_bytes(key.n)\n exponent = int_to_bytes(key.e)\n\n return b\"\\x00\\x00\\x00\\x80\" + mod + b\"\\x00\\x00\\x00\\x03\" + exponent", "def key_encryption_key(self) -> Optional[pulumi.Input['KeyVaultAndKeyReferenceArgs']]:\n return pulumi.get(self, \"key_encryption_key\")", "def _serialize_private_key(private_key, password=None):\n error = None\n pvt_key_loaders = [\n load_pem_private_key, load_der_private_key\n ]\n pvt_key = None\n for loader in pvt_key_loaders:\n if not pvt_key:\n try:\n pvt_key = loader(\n private_key.encode('utf-8'),\n password=password,\n backend=default_backend()\n )\n error = False\n break\n except (ValueError, UnsupportedAlgorithm) as err:\n error = err\n if error:\n raise errors.InvalidPrivateKeyError(error)\n else:\n return pvt_key", "def get_pvk_pem_from_bytes(pvk: bytes) -> bytes:\n sk = ecdsa.SigningKey.from_string(pvk, curve=CURVE)\n\n return sk.to_pem()", "def get_pub_key_bytes(priv_key: rsa.RSAPrivateKey) -> bytes:\n k = priv_key.public_key()\n return k.public_bytes(encoding=serialization.Encoding.PEM,\n 
format=serialization.PublicFormat.SubjectPublicKeyInfo)", "def serialize_wrapped_key(key_provider, wrapping_algorithm, wrapping_key_id, encrypted_wrapped_key):\n if encrypted_wrapped_key.iv is None:\n key_info = wrapping_key_id\n key_ciphertext = encrypted_wrapped_key.ciphertext\n else:\n key_info = struct.pack(\n \">{key_id_len}sII{iv_len}s\".format(\n key_id_len=len(wrapping_key_id), iv_len=wrapping_algorithm.algorithm.iv_len\n ),\n to_bytes(wrapping_key_id),\n len(encrypted_wrapped_key.tag) * 8, # Tag Length is stored in bits, not bytes\n wrapping_algorithm.algorithm.iv_len,\n encrypted_wrapped_key.iv,\n )\n key_ciphertext = encrypted_wrapped_key.ciphertext + encrypted_wrapped_key.tag\n return EncryptedDataKey(\n key_provider=MasterKeyInfo(provider_id=key_provider.provider_id, key_info=key_info),\n encrypted_data_key=key_ciphertext,\n )", "def publicKey2Text(key:RSA.RsaKey):\n return b58encode(key.exportKey('DER'))", "def to_pem(self, encoding=\"pem\"):\n return public_to_pem(self, encoding)", "def deserialize_key(key: bytes) -> str:\n return key.decode()", "def base64_pub_encode(self, key):\n (y, g, p, q) = (str(key.y), str(key.g), str(key.p), str(key.q))\n return base64.b64encode((y + \",\" + g + \",\" + p + \",\" + q).encode('utf-8')).decode('utf-8')", "def _encrypted_user_photo_key_str(self):\r\n face_aes_key_str = settings.VERIFY_STUDENT[\"SOFTWARE_SECURE\"][\"FACE_IMAGE_AES_KEY\"]\r\n face_aes_key = face_aes_key_str.decode(\"hex\")\r\n rsa_key_str = settings.VERIFY_STUDENT[\"SOFTWARE_SECURE\"][\"RSA_PUBLIC_KEY\"]\r\n rsa_encrypted_face_aes_key = rsa_encrypt(face_aes_key, rsa_key_str)\r\n\r\n return rsa_encrypted_face_aes_key.encode(\"base64\")", "def encryption_key(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"encryption_key\")", "def encryption_key(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"encryption_key\")", "def _write_encrypted_pem(self, passphrase, tmpfile):\n key = PKey()\n key.generate_key(TYPE_RSA, 1024)\n pem = dump_privatekey(FILETYPE_PEM, key, \"blowfish\", passphrase)\n with open(tmpfile, \"w\") as fObj:\n fObj.write(pem.decode(\"ascii\"))\n return tmpfile", "def encrypt(self, s):\n public_key = serialization.load_pem_public_key(\n self.key.encode('utf-8'),\n backend=default_backend())\n\n encrypted = public_key.encrypt(\n s.encode('utf-8'),\n padding.OAEP(\n mgf=padding.MGF1(algorithm=hashes.SHA256()),\n algorithm=hashes.SHA256(),\n label=None))\n # enc = bytes(encrypted).decode(\"utf-8\")\n return str(encrypted)", "def encryption_key(self) -> typing.Optional[aws_cdk.aws_kms.IKey]:\n return self._values.get('encryption_key')", "def test_set_private_key_setter_encrypted_pem(self) -> None:\n\n expected = self.pem_private_key.decode()\n\n encryptor = DataEncryption()\n encryptor.set_private_key(\n self.encrypted_pem_private_key, password=self.private_key_password\n )\n\n # pylint: disable=protected-access\n actual = encryptor._loaded_private_key.private_bytes(\n serialization.Encoding.PEM,\n serialization.PrivateFormat.PKCS8,\n serialization.NoEncryption(),\n ).decode()\n\n self.assertEqual(expected, actual)", "def get_encoded(self):\n return self.key", "def pem_armor_certificate(certificate):\n\n return asymmetric.dump_certificate(certificate)", "def encode(\n self,\n key: KeyInterface,\n payload: Union[bytes, str, dict],\n footer: Union[bytes, str, dict] = b\"\",\n implicit_assertion: Union[bytes, str] = b\"\",\n nonce: bytes = b\"\",\n serializer: Any = json,\n exp: int = 0,\n ) -> bytes:\n\n if not isinstance(payload, 
(bytes, str, dict)):\n raise ValueError(\"payload should be bytes, str or dict.\")\n\n res: Union[bytes, str]\n bp: bytes\n if isinstance(payload, dict):\n if not serializer:\n raise ValueError(\"serializer should be specified for the payload object.\")\n try:\n if not callable(serializer.dumps):\n raise ValueError(\"serializer should have dumps().\")\n except AttributeError:\n raise ValueError(\"serializer should have dumps().\")\n except Exception:\n raise\n try:\n payload = self._set_registered_claims(payload, exp)\n res = serializer.dumps(payload)\n bp = res if isinstance(res, bytes) else res.encode(\"utf-8\")\n except Exception as err:\n raise ValueError(\"Failed to serialize the payload.\") from err\n else:\n bp = payload if isinstance(payload, bytes) else payload.encode(\"utf-8\")\n\n bf: bytes\n if isinstance(footer, dict):\n if not serializer:\n raise ValueError(\"serializer should be specified for the footer object.\")\n try:\n if not callable(serializer.dumps):\n raise ValueError(\"serializer should have dumps().\")\n except AttributeError:\n raise ValueError(\"serializer should have dumps().\")\n except Exception:\n raise\n try:\n res = serializer.dumps(footer)\n bf = res if isinstance(res, bytes) else res.encode(\"utf-8\")\n except Exception as err:\n raise ValueError(\"Failed to serialize the footer.\") from err\n else:\n bf = footer if isinstance(footer, bytes) else footer.encode(\"utf-8\")\n\n bi = implicit_assertion if isinstance(implicit_assertion, bytes) else implicit_assertion.encode(\"utf-8\")\n\n if key.purpose == \"local\":\n return key.encrypt(bp, bf, bi, nonce)\n\n sig = key.sign(bp, bf, bi)\n token = key.header + base64url_encode(bp + sig)\n if bf:\n token += b\".\" + base64url_encode(bf)\n return token", "def public_key(self):\n keyfile = self._get_field('System', 'keyfile')\n return join(self.key_path, keyfile)", "def write_keys(path, keys):\n p_keys = pickle.dumps(keys)\n b_keys = base64.b64encode(p_keys)\n with open(path, \"wb+\") as walletfile:\n walletfile.write(b_keys)", "def generate_key():\r\n # generating key\r\n key = Fernet.generate_key()\r\n\r\n key_dir = os.path.join(os.path.dirname(__file__), \"resources/key\")\r\n\r\n # writing key in file\r\n with open(key_dir, \"wb\") as keyFile:\r\n keyFile.write(key)", "def write_key(self, keyfile_name):\n\n print(self.key)\n with open(keyfile_name, 'wb') as f:\n f.write(self.key)", "def _encode_key(self, key):\n return key.encode() if isinstance(key, str) else key", "def save_rsa_private_key(private_key: RSAPrivateKeyWithSerialization, file_path: str, password: str = None,\n encoding: Encoding = Encoding.PEM) -> None:\n if password:\n if isinstance(password, str):\n password_bytes = password.encode('utf-8')\n else:\n password_bytes = password\n enc = serialization.BestAvailableEncryption(password=password_bytes) if password else serialization.NoEncryption()\n pem_data = private_key.private_bytes(encoding, serialization.PrivateFormat.PKCS8, enc)\n with open(file_path, 'wb') as f:\n f.write(pem_data)", "def _encode_key(self, key: str) -> str:\n return key", "def get_PEM(self):\n\n return self.get_POW().pemWritePublic()", "def encryption_key(self) -> typing.Optional[aws_cdk.aws_kms.IKey]:\n return jsii.get(self, \"encryptionKey\")", "def encryption_key(self) -> typing.Optional[aws_cdk.aws_kms.IKey]:\n return jsii.get(self, \"encryptionKey\")", "def private_key(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"private_key\")", "def private_key(self) -> pulumi.Output[str]:\n return pulumi.get(self, 
\"private_key\")", "def raw(self) -> bytes:\n return bytes(self._verify_key)", "def test_set_public_key_setter_pem_str(self) -> None:\n\n expected = self.pem_public_key.decode()\n\n encryptor = DataEncryption()\n encryptor.set_public_key(self.pem_public_key.decode())\n\n # pylint: disable=protected-access\n actual = encryptor._loaded_public_key.public_bytes(\n serialization.Encoding.PEM, serialization.PublicFormat.PKCS1\n ).decode()\n\n self.assertEqual(expected, actual)", "def generate_encrypted_key(key, encryption_key):\n pub_enc_key = RSA.importKey(encryption_key)\n # RSA encryption protocol according to PKCS#1 OAEP\n cipher = PKCS1_OAEP.new(pub_enc_key)\n return cipher.encrypt(key)", "def get_private_key(self) -> str:\n raise NotImplementedError(\"Please implement your own get_public_key() method\")", "def wrap_rsa_key(public_key: RSAPublicKey, private_key_bytes: bytes) -> bytes:\n wrapped_key = public_key.encrypt(\n private_key_bytes,\n padding.OAEP(\n mgf=padding.MGF1(algorithm=hashes.SHA1()),\n algorithm=hashes.SHA1(),\n label=None,\n ),\n )\n encoded_wrapped_key = base64.b64encode(wrapped_key)\n return encoded_wrapped_key", "def get_verifying_key(private_key):\n return private_key.get_verifying_key().to_pem().decode('ascii')", "def generate_key():\n key = Fernet.generate_key()\n with open(\"pass.key\", \"wb\") as key_file:\n key_file.write(key)", "def key_encryption_key_identity(self) -> Optional[pulumi.Input['ClusterPropertiesKeyEncryptionKeyIdentityArgs']]:\n return pulumi.get(self, \"key_encryption_key_identity\")", "def key(self):\n return self._key.decode('utf-8')", "def WriteKey(self, key, version_number, encrypter=None):\n key = str(key)\n if encrypter:\n key = encrypter.Encrypt(key) # encrypt key info before outputting\n self.dict[str(version_number)] = key", "def to_json(self) -> Dict[str, Union[List[int], List[str]]]:\n fmt = serialization.PublicFormat.SubjectPublicKeyInfo\n obj: Dict[str, Union[List[int], List[str]]] = {}\n lst = []\n\n for pubkey in self.ringv2.values():\n try:\n pubbytes = pubkey.public_bytes(encoding=serialization.Encoding.DER, format=fmt)\n lst.append(pubbytes)\n except Exception as ex:\n logger.error(\"Could not serialize key: %s\", str(ex))\n\n obj[\"pubkeys\"] = [base64.b64encode(pubkey).decode(\"ascii\") for pubkey in lst]\n obj[\"keyids\"] = list(self.ringv2.keys())\n return obj", "def ToString(self, full_key_pair=True):\r\n mod = _NumToB64(self.keypair.n)\r\n exp = '.' + _NumToB64(self.keypair.e)\r\n private_exp = ''\r\n if full_key_pair and self.keypair.d:\r\n private_exp = '.' + _NumToB64(self.keypair.d)\r\n return 'RSA.' 
+ mod + exp + private_exp", "def encode_csr(self, csr):\n return csr.public_bytes(\n serialization.Encoding.PEM,\n ).decode(encoding='UTF-8')", "def to_pem(self, encoding=\"pem\"):\n return self.publicArea.to_pem(encoding)", "def save(self):\n if not self.fileKey:\n log.error(\"attempted to save a closed wallet\")\n return\n encrypted = self.fileKey.encrypt(tinyjson.dump(self).encode()).hex()\n w = tinyjson.dump({\n \"keyparams\": self.fileKey.params(),\n \"wallet\": encrypted,\n })\n helpers.saveFile(self.path, w)", "async def server_public_key(self) -> bytes:\n raise NotImplementedError", "def encode_public_key(value: PublicKey) -> bytes:\n return bytes([value.algo.value]) + value.pbk", "def encrypt(data, key):\n data = six.ensure_binary(data)\n data = privy.hide(secret=data, password=key)\n data = six.ensure_text(data)\n return data", "def serialize(self):\n byte_array = bytearray()\n header = (\n self.sequence_number | (1 << 63)\n if self.type == KeyType.PUT\n else self.sequence_number\n )\n # append header first\n byte_array.extend(byte_utils.integer_to_n_bytes_array(header, 8))\n pickle_key = pickle.dumps(self.key)\n # key length\n byte_array.extend(byte_utils.integer_to_four_bytes_array(len(pickle_key)))\n # key byte array\n byte_array.extend(pickle_key)\n # it is a put operation, value is needed\n if self.type == KeyType.PUT:\n pickle_value = pickle.dumps(self.value)\n # value length\n byte_array.extend(byte_utils.integer_to_four_bytes_array(len(pickle_value)))\n # value byte array\n byte_array.extend(pickle_value)\n return bytes(byte_array)", "def generate_keystream(self):", "def save_rsa_public_key(public_key: RSAPublicKey, file_path: str, encoding: Encoding = Encoding.PEM) -> None:\n pem_data = public_key.public_bytes(encoding, serialization.PublicFormat.PKCS1)\n with open(file_path, 'wb') as f:\n f.write(pem_data)", "def get_PEM(self):\n\n return self.get_POW().pemWritePrivate()", "def test_set_private_key_setter_encrypted_pem_str_password(self) -> None:\n\n expected = self.pem_private_key.decode()\n\n encryptor = DataEncryption()\n encryptor.set_private_key(\n self.encrypted_pem_private_key, password=self.private_key_password.decode()\n )\n\n # pylint: disable=protected-access\n actual = encryptor._loaded_private_key.private_bytes(\n serialization.Encoding.PEM,\n serialization.PrivateFormat.PKCS8,\n serialization.NoEncryption(),\n ).decode()\n\n self.assertEqual(expected, actual)", "def generate_key():\n key = Fernet.generate_key()\n with open(\"Secret.key\",\"wb\")as key_file:\n key_file.write(key)", "def crypt_key(self):\n return self._crypt_key", "def encrypt(self, data):\n\n key_public = RsaPublicKey.Read(self.crypt_public)\n return b64encode(key_public.Encrypt(data))", "def encode_certificate(self, cert):\n return cert.public_bytes(\n serialization.Encoding.PEM,\n ).decode(encoding='UTF-8')", "def get_pem(bucket_name: str, bucket_key: str) -> bytes:\n s3 = boto3.resource(\"s3\")\n s3.Bucket(bucket_name).download_file(bucket_key, \"/tmp/key.pem\")\n with open(\"/tmp/key.pem\", \"rb\") as f:\n return f.read()", "def rsa_private_key_pkcs1_to_pkcs8(pkcs1_key):\n algorithm = RsaAlgorithmIdentifier()\n algorithm[\"rsaEncryption\"] = RSA_ENCRYPTION_ASN1_OID\n\n pkcs8_key = PKCS8PrivateKey()\n pkcs8_key[\"version\"] = 0\n pkcs8_key[\"privateKeyAlgorithm\"] = algorithm\n pkcs8_key[\"privateKey\"] = pkcs1_key\n\n return encoder.encode(pkcs8_key)", "def generate_key(self):\n self.key = Fernet.generate_key()\n with open(\"secret.key\", \"wb\") as key_file:\n 
key_file.write(self.key)", "def cert_to_pem(cert):\n return cert.public_bytes(Encoding.PEM)", "def test_set_private_key_setter_pem_str(self) -> None:\n\n expected = self.pem_private_key.decode()\n\n encryptor = DataEncryption()\n encryptor.set_private_key(self.pem_private_key.decode())\n\n # pylint: disable=protected-access\n actual = encryptor._loaded_private_key.private_bytes(\n serialization.Encoding.PEM,\n serialization.PrivateFormat.PKCS8,\n serialization.NoEncryption(),\n ).decode()\n\n self.assertEqual(expected, actual)", "def snapshot_encryption_key(self) -> 'outputs.CustomerEncryptionKeyResponse':\n return pulumi.get(self, \"snapshot_encryption_key\")", "def rsa_public_key_pkcs1_to_pkcs8(pkcs1_key):\n algorithm = RsaAlgorithmIdentifier()\n algorithm[\"rsaEncryption\"] = RSA_ENCRYPTION_ASN1_OID\n\n pkcs8_key = PublicKeyInfo()\n pkcs8_key[\"algorithm\"] = algorithm\n pkcs8_key[\"publicKey\"] = univ.BitString.fromOctetString(pkcs1_key)\n\n return encoder.encode(pkcs8_key)", "def test_set_public_key(self) -> None:\n\n expected = self.pem_public_key.decode()\n\n encryptor = DataEncryption(public_key=self.pem_public_key)\n\n # pylint: disable=protected-access\n actual = encryptor._loaded_public_key.public_bytes(\n serialization.Encoding.PEM, serialization.PublicFormat.PKCS1\n ).decode()\n\n self.assertEqual(expected, actual)" ]
[ "0.7412692", "0.68068147", "0.6512063", "0.64399076", "0.6427828", "0.6327557", "0.6320969", "0.6244174", "0.6185736", "0.61740416", "0.61382186", "0.61248946", "0.6111784", "0.6021915", "0.60145104", "0.5913322", "0.58946526", "0.5891813", "0.5877754", "0.58637", "0.58601266", "0.58569574", "0.5839168", "0.580532", "0.57868207", "0.5781909", "0.577932", "0.5775447", "0.57231957", "0.57191354", "0.56761086", "0.56628245", "0.5656347", "0.5654487", "0.5650761", "0.5647971", "0.5645163", "0.5621641", "0.56189597", "0.5579096", "0.5578915", "0.5557507", "0.5549879", "0.5544318", "0.5520393", "0.5520393", "0.55182254", "0.55075634", "0.5507179", "0.55034155", "0.5494018", "0.5479648", "0.5476862", "0.54686624", "0.54662794", "0.5456885", "0.54335475", "0.54283774", "0.54208994", "0.54087776", "0.5406724", "0.54065603", "0.54065603", "0.539869", "0.539869", "0.5398529", "0.5398179", "0.5392668", "0.5389589", "0.5388269", "0.53804797", "0.53783745", "0.53763384", "0.5372093", "0.536983", "0.53646827", "0.5364198", "0.534927", "0.5348664", "0.5348041", "0.53477466", "0.53397346", "0.53233266", "0.53175956", "0.5313378", "0.5310482", "0.5298668", "0.5286863", "0.5276021", "0.5269049", "0.52628976", "0.5254872", "0.52513486", "0.5250286", "0.5239706", "0.5229767", "0.5229175", "0.5213243", "0.52114785", "0.52111006" ]
0.6648909
2
Serialize certificate in PEM format.
def cert_to_pem(cert):
    return cert.public_bytes(Encoding.PEM)
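
A round-trip sketch for cert_to_pem above: build a throwaway self-signed certificate with the `cryptography` x509 API and serialize it. The subject name, key size, and one-day validity window are illustrative assumptions.

from datetime import datetime, timedelta
from cryptography import x509
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives.serialization import Encoding
from cryptography.x509.oid import NameOID

def cert_to_pem(cert):
    return cert.public_bytes(Encoding.PEM)

# Throwaway self-signed certificate (illustrative subject and validity).
key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, 'example.test')])
cert = (
    x509.CertificateBuilder()
    .subject_name(name)
    .issuer_name(name)  # issuer == subject, i.e. self-signed
    .public_key(key.public_key())
    .serial_number(x509.random_serial_number())
    .not_valid_before(datetime.utcnow())
    .not_valid_after(datetime.utcnow() + timedelta(days=1))
    .sign(key, hashes.SHA256())
)
print(cert_to_pem(cert).splitlines()[0])  # b'-----BEGIN CERTIFICATE-----'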
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def encode_certificate(self, cert):\n return cert.public_bytes(\n serialization.Encoding.PEM,\n ).decode(encoding='UTF-8')", "def pem_armor_certificate(certificate):\n\n return asymmetric.dump_certificate(certificate)", "def serialize_certificate(\n certificate: Certificate, encoding: serialization.Encoding\n) -> bytes:\n try:\n cert_bytes = certificate.public_bytes(encoding)\n except Exception as err:\n raise X509CertificateError(\n 'Could not get bytes from object: {}'.format(str(err))\n )\n\n return cert_bytes", "def get_cert_print_bytes(cert):\n #collect PEM bytes\n cert_bytes = cert.decode(\"utf-8\")\n cert_bytes += '\\n'\n\n #collect certificate text\n cert_bytes += certs_handler.get_cert_content(cert).decode(\"utf-8\")\n cert_bytes += '\\n'\n\n #contains both PEM and certificate text\n return cert_bytes", "def _build_pem(tls_cert):\n pem = ()\n if tls_cert.intermediates:\n for c in tls_cert.intermediates:\n pem = pem + (c,)\n if tls_cert.certificate:\n pem = pem + (tls_cert.certificate,)\n if tls_cert.private_key:\n pem = pem + (tls_cert.private_key,)\n return \"\\n\".join(pem)", "def to_pem(self, encoding=\"pem\"):\n return public_to_pem(self, encoding)", "def certificate(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"certificate\")", "def encode_csr(self, csr):\n return csr.public_bytes(\n serialization.Encoding.PEM,\n ).decode(encoding='UTF-8')", "def to_pem(self, encoding=\"pem\"):\n return self.publicArea.to_pem(encoding)", "def get_own_cert_as_string(self):\n certpath, cert, certstr = self.get_own_cert()\n return certstr", "def get_private_key_in_pem(self):\n serialized_private = self.private_key_obj.private_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PrivateFormat.TraditionalOpenSSL,\n encryption_algorithm=serialization.NoEncryption()\n )\n return serialized_private", "def Certificate(self) -> _n_8_t_0:", "def Certificate(self) -> _n_8_t_0:", "def get_public_key_in_pem(self):\n serialized_public = self.public_key_obj.public_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PublicFormat.SubjectPublicKeyInfo\n )\n return serialized_public", "def certificate(self) -> str:\n return pulumi.get(self, \"certificate\")", "def get_PEM(self):\n\n return self.get_POW().pemWrite()", "def convert_key_to_pem ( key_filename, output_filename ) :\n cmd = 'openssl rsa -in ' + key_filename + ' -outform PEM -out ' + output_filename\n return subprocess.call( cmd, shell = True )", "def certificate(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"certificate\")", "def serializeParameters(parameters):\n\treturn parameters.parameter_bytes(\n\t\tencoding=serialization.Encoding.PEM,\n\t\tformat=serialization.ParameterFormat.PKCS3\n\t)", "def serialize(self, data):\n assert self._key is not None\n assert self._cert is not None\n try:\n data = self._serialize(data)\n signature = b64encode(self._key.sign(data, self._digest))\n signer = self._cert.get_id()\n return self._serialize(dict(data=data,\n signer=signer,\n signature=signature))\n except Exception, exc:\n raise SecurityError(\"Unable to serialize: %r\" % (exc, ))", "def get_PEM(self):\n\n return self.get_POW().pemWritePrivate()", "def load_pem_x509_certificate(data):\n return _x509.load_pem_x509_certificate(data, _backends.default_backend())", "def serializePublicKey(public_key):\n\treturn 
public_key.public_bytes(\n\t\tencoding=serialization.Encoding.PEM,\n\t\tformat=serialization.PublicFormat.SubjectPublicKeyInfo\n\t)", "def req_to_pem(req):\n return req.public_bytes(Encoding.PEM)", "def get_cert_content(certificate):\n cert_object = crypto.load_certificate(crypto.FILETYPE_PEM, certificate)\n cert_content = crypto.dump_certificate(crypto.FILETYPE_TEXT, cert_object)\n return cert_content", "def pfx2pem(input_file, output_file, passphrase=None):\n pfx = open(input_file, 'rb').read()\n p12 = crypto.load_pkcs12(pfx, passphrase)\n pem = crypto.dump_certificate(crypto.FILETYPE_PEM, p12.get_certificate())\n pem += crypto.dump_privatekey(crypto.FILETYPE_PEM, p12.get_privatekey())\n open(output_file, 'wb').write(pem)", "def encode_key(self, key):\n return key.private_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PrivateFormat.TraditionalOpenSSL,\n encryption_algorithm=serialization.NoEncryption(),\n ).decode(encoding='UTF-8')", "def certificate_body(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"certificate_body\")", "def get_PEM(self):\n\n return self.get_POW().pemWritePublic()", "def _sign_cert(self, cert):\n with open(self._get_key_link(self.commonname), 'r') as private_file:\n data = private_file.read()\n pkey = crypto.load_privatekey(crypto.FILETYPE_PEM,\n data)\n cert.sign(pkey, 'sha256')", "def tls_certificate_chain_pem_path(tls_certificate):\n with tls_certificate.private_key_and_cert_chain_pem.tempfile() as cert_pem:\n yield cert_pem", "def save_certificate(self, certificate_msg, cert_path, chain_path):\n # pylint: disable=no-self-use\n cert_chain_abspath = None\n cert_fd, cert_file = le_util.unique_file(cert_path, 0o644)\n cert_fd.write(certificate_msg.certificate.as_pem())\n cert_fd.close()\n logging.info(\n \"Server issued certificate; certificate written to %s\", cert_file)\n\n if certificate_msg.chain:\n chain_fd, chain_fn = le_util.unique_file(chain_path, 0o644)\n for cert in certificate_msg.chain:\n chain_fd.write(cert.to_pem())\n chain_fd.close()\n\n logging.info(\"Cert chain written to %s\", chain_fn)\n\n # This expects a valid chain file\n cert_chain_abspath = os.path.abspath(chain_fn)\n\n return os.path.abspath(cert_file), cert_chain_abspath", "def get_der(self):\n return OpenSSL.crypto.dump_certificate(\n OpenSSL.crypto.FILETYPE_ASN1, self._cert)", "def serializePrivateKey(private_key):\n\treturn private_key.private_bytes(\n\t\tencoding=serialization.Encoding.PEM,\n\t\tformat=serialization.PrivateFormat.PKCS8,\n\t\tencryption_algorithm=serialization.NoEncryption()\n\t)", "def save(self, cert_path: Union[Path, str], key_path: Union[Path, str]):\n cert_path, key_path = Path(cert_path), Path(key_path)\n\n cert_path.parent.mkdir(parents=True, exist_ok=True)\n with cert_path.open(\"wb\") as file:\n file.write(OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, self.cert))\n\n key_path.parent.mkdir(parents=True, exist_ok=True)\n with key_path.open(\"wb\") as file:\n file.write(OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, self.key))", "def save(self, save_dir):\n path = os.path.join(save_dir, self.name + \".pem\")\n with open(path, \"wb\") as f:\n f.write(self.public_key)", "def jwt_key_to_pem(self, key_json_dict):\n pub_key = RSAAlgorithm.from_jwk(json.dumps(key_json_dict))\n return pub_key.public_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PublicFormat.SubjectPublicKeyInfo,\n )", "def write_certificate_to_file(\n certificate: Certificate,\n dest_file: BinaryIO,\n encoding: 
serialization.Encoding,\n) -> None:\n\n try:\n cert_bytes = serialize_certificate(certificate, encoding)\n dest_file.write(cert_bytes)\n except Exception as err:\n raise X509CertificateError(\n 'Error writing certificate to file: {}'.format(str(err))\n )", "def pem(ctx):\n click.echo(_get_pem(ctx().source))", "def key_to_pem(key, password=None):\n if password:\n enc = BestAvailableEncryption(as_bytes(password))\n else:\n enc = NoEncryption()\n return key.private_bytes(Encoding.PEM, PrivateFormat.PKCS8, enc)", "def pfx2pem_memmory(input_file):\n pfx = open(input_file, 'rb').read()\n p12 = crypto.load_pkcs12(pfx)\n pem = crypto.dump_certificate(crypto.FILETYPE_PEM, p12.get_certificate())\n pem += crypto.dump_privatekey(crypto.FILETYPE_PEM, p12.get_privatekey())\n return pem", "def certificate(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"certificate\")", "def getCertificate(self, req):\n return dumpCert(createCert(parseCertReqStr(req), self._cert,\n self._key))", "def extract_ca_crt_bytes_from_pem(pem_content):\n begin_search = pem_content.find(constants.BEGIN_CERTIFICATE_MARKER)\n if begin_search < 0:\n raise exception.InvalidKubernetesCA\n\n end_search = pem_content.find(constants.END_CERTIFICATE_MARKER)\n if end_search < 0:\n raise exception.InvalidKubernetesCA\n\n end_search += len(constants.END_CERTIFICATE_MARKER)\n base64_crt = base64.encode_as_text(pem_content[begin_search:end_search])\n return base64_crt", "def get_own_cert_as_openssl_object(self):\n# _log.debug(\"get_own_cert_as_openssl_object: node_name={}\".format(self.node_name))\n certpath, cert, certstr = self.get_own_cert()\n return cert", "def ec_private_pem_to_private_bin(pem):\n return \"\".join(pem.split(\"\\n\")[1:-2]).decode(\"BASE64\")", "def cert_info(user, course):\r\n if not course.may_certify():\r\n return {}\r\n\r\n return _cert_info(user, course, certificate_status_for_student(user, course.id))", "def tls_ca_certificate_pem_path(ca):\n with ca.cert_pem.tempfile() as ca_cert_pem:\n yield ca_cert_pem", "def get_certificate(self, cert_id):\r\n return self.ssl.getObject(id=cert_id)", "def cert(self):\n return self._cert", "def serialize(self,writer: SerializationWriter) -> None:\n if not writer:\n raise TypeError(\"writer cannot be null.\")\n super().serialize(writer)\n writer.write_str_value(\"certificateData\", self.certificate_data)\n writer.write_str_value(\"developerId\", self.developer_id)\n writer.write_str_value(\"keyId\", self.key_id)\n writer.write_str_value(\"serviceId\", self.service_id)", "def get_pem(bucket_name: str, bucket_key: str) -> bytes:\n s3 = boto3.resource(\"s3\")\n s3.Bucket(bucket_name).download_file(bucket_key, \"/tmp/key.pem\")\n with open(\"/tmp/key.pem\", \"rb\") as f:\n return f.read()", "def pem(b, name):\r\n s1 = b2a_base64(b)[:-1] # remove terminating \\n\r\n s2 = \"\"\r\n while s1:\r\n s2 += s1[:64] + \"\\n\"\r\n s1 = s1[64:]\r\n s = (\"-----BEGIN %s-----\\n\" % name) + s2 + \\\r\n (\"-----END %s-----\\n\" % name) \r\n return s", "def prepare_certificate_file(certificate: str) -> str:\n certificate_file = NamedTemporaryFile(delete=False)\n certificate_path = certificate_file.name\n certificate_file.write(bytes(certificate, 'utf-8'))\n certificate_file.close()\n demisto.debug('Successfully preparing the certificate')\n return certificate_path", "def certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> 
Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate\")", "def certificate_chain(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"certificate_chain\")", "def get_pvk_pem_from_bytes(pvk: bytes) -> bytes:\n sk = ecdsa.SigningKey.from_string(pvk, curve=CURVE)\n\n return sk.to_pem()", "def get_ssl_certificate():", "def _generate_ca_cert(path, pkey):\n crt = _make_base_cert(pkey, 5000, socket.gethostname(),\n random.randrange(0, 2**64))\n crt.set_issuer(crt.get_subject())\n crt.sign(pkey, 'sha256')\n\n data = crypto.dump_certificate(crypto.FILETYPE_PEM, crt)\n open(path, 'wb').write(data)", "def get_private_key_in_der(self):\n serialized_private = self.private_key_obj.private_bytes(\n encoding=serialization.Encoding.DER,\n format=serialization.PrivateFormat.TraditionalOpenSSL,\n encryption_algorithm=serialization.NoEncryption()\n )\n return serialized_private", "def add_certificate(self, certificate):\r\n return self.ssl.createObject(certificate)", "def to_json(self) -> Dict[str, Union[List[int], List[str]]]:\n fmt = serialization.PublicFormat.SubjectPublicKeyInfo\n obj: Dict[str, Union[List[int], List[str]]] = {}\n lst = []\n\n for pubkey in self.ringv2.values():\n try:\n pubbytes = pubkey.public_bytes(encoding=serialization.Encoding.DER, format=fmt)\n lst.append(pubbytes)\n except Exception as ex:\n logger.error(\"Could not serialize key: %s\", str(ex))\n\n obj[\"pubkeys\"] = [base64.b64encode(pubkey).decode(\"ascii\") for pubkey in lst]\n obj[\"keyids\"] = list(self.ringv2.keys())\n return obj", "def write_cert(filename, content):\r\n with open(filename, 'w') as cert_file:\r\n cert_file.write(content)", "def tls_certificate_private_key_pem_path(tls_certificate):\n with tls_certificate.private_key_pem.tempfile() as cert_key_pem:\n yield cert_key_pem", "def test_warn_self_signed(self):\n config_dir = self.mktemp()\n os.mkdir(config_dir)\n with open(os.path.join(config_dir, \"cert.pem\"), \"w\") as f:\n f.write(\n \"\"\"-----BEGIN 
CERTIFICATE-----\nMIID6DCCAtACAws9CjANBgkqhkiG9w0BAQUFADCBtzELMAkGA1UEBhMCVFIxDzAN\nBgNVBAgMBsOHb3J1bTEUMBIGA1UEBwwLQmHFn21ha8OnxLExEjAQBgNVBAMMCWxv\nY2FsaG9zdDEcMBoGA1UECgwTVHdpc3RlZCBNYXRyaXggTGFiczEkMCIGA1UECwwb\nQXV0b21hdGVkIFRlc3RpbmcgQXV0aG9yaXR5MSkwJwYJKoZIhvcNAQkBFhpzZWN1\ncml0eUB0d2lzdGVkbWF0cml4LmNvbTAgFw0xNzA3MTIxNDAxNTNaGA8yMTE3MDYx\nODE0MDE1M1owgbcxCzAJBgNVBAYTAlRSMQ8wDQYDVQQIDAbDh29ydW0xFDASBgNV\nBAcMC0JhxZ9tYWvDp8SxMRIwEAYDVQQDDAlsb2NhbGhvc3QxHDAaBgNVBAoME1R3\naXN0ZWQgTWF0cml4IExhYnMxJDAiBgNVBAsMG0F1dG9tYXRlZCBUZXN0aW5nIEF1\ndGhvcml0eTEpMCcGCSqGSIb3DQEJARYac2VjdXJpdHlAdHdpc3RlZG1hdHJpeC5j\nb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDwT6kbqtMUI0sMkx4h\nI+L780dA59KfksZCqJGmOsMD6hte9EguasfkZzvCF3dk3NhwCjFSOvKx6rCwiteo\nWtYkVfo+rSuVNmt7bEsOUDtuTcaxTzIFB+yHOYwAaoz3zQkyVW0c4pzioiLCGCmf\nFLdiDBQGGp74tb+7a0V6kC3vMLFoM3L6QWq5uYRB5+xLzlPJ734ltyvfZHL3Us6p\ncUbK+3WTWvb4ER0W2RqArAj6Bc/ERQKIAPFEiZi9bIYTwvBH27OKHRz+KoY/G8zY\n+l+WZoJqDhupRAQAuh7O7V/y6bSP+KNxJRie9QkZvw1PSaGSXtGJI3WWdO12/Ulg\nepJpAgMBAAEwDQYJKoZIhvcNAQEFBQADggEBAJXEq5P9xwvP9aDkXIqzcD0L8sf8\newlhlxTQdeqt2Nace0Yk18lIo2oj1t86Y8jNbpAnZJeI813Rr5M7FbHCXoRc/SZG\nI8OtG1xGwcok53lyDuuUUDexnK4O5BkjKiVlNPg4HPim5Kuj2hRNFfNt/F2BVIlj\niZupikC5MT1LQaRwidkSNxCku1TfAyueiBwhLnFwTmIGNnhuDCutEVAD9kFmcJN2\nSznugAcPk4doX2+rL+ila+ThqgPzIkwTUHtnmjI0TI6xsDUlXz5S3UyudrE2Qsfz\ns4niecZKPBizL6aucT59CsunNmmb5Glq8rlAcU+1ZTZZzGYqVYhF6axB9Qg=\n-----END CERTIFICATE-----\"\"\"\n )\n\n config = {\n \"tls_certificate_path\": os.path.join(config_dir, \"cert.pem\"),\n \"tls_fingerprints\": [],\n }\n\n t = TestConfig()\n t.read_config(config, config_dir_path=\"\", data_dir_path=\"\")\n t.read_certificate_from_disk(require_cert_and_key=False)\n\n warnings = self.flushWarnings()\n self.assertEqual(len(warnings), 1)\n self.assertEqual(\n warnings[0][\"message\"],\n (\n \"Self-signed TLS certificates will not be accepted by \"\n \"Synapse 1.0. 
Please either provide a valid certificate, \"\n \"or use Synapse's ACME support to provision one.\"\n ),\n )", "def to_binary(self):\n c = containerize(exclude_fields(self))\n self.payload = MsgCertificateChain._parser.build(c)\n return self.pack()", "def certificate_body(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"certificate_body\")", "def cert(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cert\")", "def cert(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cert\")", "def to_binary(self):\n c = containerize(exclude_fields(self))\n self.payload = MsgEcdsaCertificate._parser.build(c)\n return self.pack()", "def get_certificate(self, path: Union[bytes, str]) -> str:\n path = _to_bytes_or_null(path)\n certificate = ffi.new(\"char **\")\n ret = lib.Fapi_GetCertificate(self._ctx, path, certificate)\n _chkrc(ret)\n # certificate is guaranteed to be a null-terminated string\n return ffi.string(_get_dptr(certificate, lib.Fapi_Free)).decode()", "def get_public_key_in_der(self):\n serialized_public = self.public_key_obj.public_bytes(\n encoding=serialization.Encoding.DER,\n format=serialization.PublicFormat.SubjectPublicKeyInfo\n )\n return serialized_public", "def as_text(self):\n buf=BIO.MemoryBuffer()\n m2.x509_crl_print(buf.bio_ptr(), self.crl)\n return buf.read_all()", "def certificate_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"certificate_id\")", "def make_cert_for_spki_request(spki_req_b64, serial, ident):\n spki_obj = netscape_spki_from_b64(spki_req_b64)\n if spki_obj is None:\n raise ValueError('Invalid SPKI object')\n\n root_crt = _try_load_ca_cert(cfg.ca_cert_path())\n root_key = _try_load_ca_private_key(cfg.ca_private_key_path())\n crt = _make_base_cert(spki_obj.get_pubkey(), 365, ident, serial)\n crt.set_issuer(root_crt.get_subject())\n crt.sign(root_key, 'sha256')\n return crypto.dump_certificate(crypto.FILETYPE_ASN1, crt)", "def get_ssl_certificate() :", "def raw(self) -> bytes:\n return bytes(self._signing_key)", "def ca_file(tmpdir):\n key = rsa.generate_private_key(public_exponent=65537, key_size=2048)\n public_key = key.public_key()\n\n builder = x509.CertificateBuilder()\n builder = builder.subject_name(\n x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, \"pyopenssl.org\")])\n )\n builder = builder.issuer_name(\n x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, \"pyopenssl.org\")])\n )\n one_day = datetime.timedelta(1, 0, 0)\n builder = builder.not_valid_before(datetime.datetime.today() - one_day)\n builder = builder.not_valid_after(datetime.datetime.today() + one_day)\n builder = builder.serial_number(int(uuid.uuid4()))\n builder = builder.public_key(public_key)\n builder = builder.add_extension(\n x509.BasicConstraints(ca=True, path_length=None),\n critical=True,\n )\n\n certificate = builder.sign(private_key=key, algorithm=hashes.SHA256())\n\n ca_file = tmpdir.join(\"test.pem\")\n ca_file.write_binary(\n certificate.public_bytes(\n encoding=serialization.Encoding.PEM,\n )\n )\n\n return str(ca_file).encode(\"ascii\")", "def get_cert_subject_string_hash(cert):\n try:\n public_bytes = cert.public_bytes(encoding=serialization.Encoding.PEM)\n cert_c = crypto.load_certificate(crypto.FILETYPE_PEM, public_bytes)\n\n # get the subject object from the loaded certificate\n cert_subject = cert_c.get_subject()\n\n # for each component presented on certificate subject,\n # converts the respective name and value for strings and join all\n # together\n subject_attributes = 
\"\".join(\"/{0:s}={1:s}\".format(name.decode(),\n value.decode())\n for name, value in\n cert_subject.get_components())\n\n # apply the hash function to binary form of the string above and\n # digest it as a hexdecimal value, and take the first 16 bytes.\n hashed_attributes = \\\n hashlib.md5(subject_attributes.encode()).hexdigest()[:16]\n\n LOG.info(\"hashed subject attributes %s from certificate \"\n % hashed_attributes)\n except Exception:\n LOG.exception()\n raise exception.SysinvException(_(\n \"Failed to get certificate subject hash.\"))\n\n return hashed_attributes", "def certificate_id(self) -> str:\n return pulumi.get(self, \"certificate_id\")", "def _sign(self, cert, keypair, certs, crls, flags):\n\n # pylint: disable=W0201\n cms = self.POW_class()\n cms.sign(cert, keypair, self.encode(), certs, crls, self.econtent_oid, flags)\n self.POW = cms", "def handle_pem_extension(oid, _input):\r\n try:\r\n cert = objects.X509(oid)\r\n cert.pem = _input.read()\r\n except (ValueError, TypeError, OSError) as failed_to_init:\r\n raise click.BadParameter(\r\n '[{0}]: File Content can\\'t be parsed or written.\\n {1}'.format(_input.name, _input.read())\r\n ) from failed_to_init", "def sign_certificate(csr):\n unique_filename = str(uuid.uuid4().hex)\n\n file = open(\"./csr_req/%s.csr\" % unique_filename, \"w\")\n file.write(csr.decode(\"utf-8\"))\n file.close()\n\n subprocess.run([\"../ca/scripts/sign.sh\", unique_filename], check=False)\n\n file = open(\"./csr_req/%s.p7b\" % unique_filename, \"r\")\n cert = file.read()\n\n os.remove(\"./csr_req/%s.csr\" % unique_filename)\n os.remove(\"./csr_req/%s.p7b\" % unique_filename)\n\n return cert", "def extract_ca_private_key_bytes_from_pem(pem_content):\n found_marker = False\n for begin_marker in [constants.BEGIN_PRIVATE_KEY_MARKER,\n constants.BEGIN_RSA_PRIVATE_KEY_MARKER]:\n begin_search = pem_content.find(begin_marker)\n if begin_search >= 0:\n found_marker = True\n break\n\n if not found_marker:\n raise exception.InvalidKubernetesCA\n\n found_marker = False\n for end_marker in [constants.END_PRIVATE_KEY_MARKER,\n constants.END_RSA_PRIVATE_KEY_MARKER]:\n end_search = pem_content.find(end_marker)\n if end_search >= 0:\n found_marker = True\n end_search += len(end_marker)\n break\n\n if not found_marker:\n raise exception.InvalidKubernetesCA\n\n base64_key = base64.encode_as_text(pem_content[begin_search:end_search])\n return base64_key", "def save_ca():\n cert_file = os.environ.get('HOME') + '/.cat_installer/ca.pem'\n debug(\"saving cert\")\n with open(cert_file, 'w') as cert:\n cert.write(Config.CA + \"\\n\")", "def _write_encrypted_pem(self, passphrase, tmpfile):\n key = PKey()\n key.generate_key(TYPE_RSA, 1024)\n pem = dump_privatekey(FILETYPE_PEM, key, \"blowfish\", passphrase)\n with open(tmpfile, \"w\") as fObj:\n fObj.write(pem.decode(\"ascii\"))\n return tmpfile", "def privateKey2Text(key:RSA.RsaKey):\n return b58encode(key.exportKey('DER'))", "def cert_file(self):\n return self._get('cert_file')", "def ssl_cert(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"ssl_cert\")", "def save(self, filename, format=FORMAT_PEM):\n bio = BIO.openfile(filename, 'wb')\n if format == FORMAT_PEM:\n return m2.x509_write_pem(bio.bio_ptr(), self.x509)\n elif format == FORMAT_DER:\n return m2.i2d_x509_bio(bio.bio_ptr(), self.x509)\n else:\n raise ValueError(\"Unknown filetype. 
Must be either FORMAT_PEM or FORMAT_DER\")", "def get_public_key(self):\n# _log.debug(\"get_public_key\")\n certpath, cert, certstr = self.get_own_cert()\n try:\n cert = load_pem_x509_certificate(certstr, default_backend())\n except Exception as err:\n _log.error(\"Failed to load X509 certificate from PEM, err={}\".format(err))\n raise\n return cert.public_key()", "def get_pretty_subject(cert):\n subject = 'subject=' + _get_pretty_name(cert.get_subject())\n issuer = 'issuer=' + _get_pretty_name(cert.get_issuer())\n return subject + '\\n' + issuer + '\\n'" ]
[ "0.7233042", "0.68470275", "0.68383855", "0.6716175", "0.6495609", "0.6100652", "0.60305905", "0.60305905", "0.60305905", "0.60291034", "0.6010482", "0.59719867", "0.59408164", "0.59336966", "0.59336966", "0.5918769", "0.59016013", "0.5878901", "0.58332604", "0.5830136", "0.5819832", "0.5810215", "0.5807481", "0.5775789", "0.5738689", "0.573722", "0.5735881", "0.5715379", "0.57139695", "0.5713399", "0.56998706", "0.5684718", "0.56594867", "0.56393665", "0.5628413", "0.562608", "0.55610263", "0.5527868", "0.5499066", "0.54241186", "0.54039824", "0.5390127", "0.53701776", "0.5355624", "0.5355624", "0.53511095", "0.5344517", "0.5342328", "0.5341142", "0.5291238", "0.5270417", "0.5254527", "0.5254451", "0.5249153", "0.523677", "0.52278763", "0.52075714", "0.52062476", "0.52062476", "0.52062476", "0.52062476", "0.52062476", "0.52062476", "0.5190941", "0.51834595", "0.51702815", "0.51679313", "0.51673263", "0.51649266", "0.5160162", "0.51538414", "0.51528245", "0.5146178", "0.5142989", "0.5116711", "0.5114609", "0.5114609", "0.51076186", "0.5096043", "0.50895566", "0.50873643", "0.5060851", "0.5052612", "0.504981", "0.5043702", "0.5023272", "0.50064707", "0.50049186", "0.4988182", "0.49688137", "0.49592948", "0.4943957", "0.49380836", "0.49190587", "0.49186242", "0.49118", "0.4910356", "0.49055108", "0.49050045", "0.48957327" ]
0.7239738
0
Serialize certificate request in PEM format.
def req_to_pem(req):
    return req.public_bytes(Encoding.PEM)
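# Usage sketch, assuming `req` is a cryptography.x509.CertificateSigningRequest
# and `Encoding` is cryptography.hazmat.primitives.serialization.Encoding;
# the variable name `csr` is hypothetical:
#   pem_bytes = req_to_pem(csr)   # b"-----BEGIN CERTIFICATE REQUEST-----..."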
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cert_to_pem(cert):\n return cert.public_bytes(Encoding.PEM)", "def _build_pem(tls_cert):\n pem = ()\n if tls_cert.intermediates:\n for c in tls_cert.intermediates:\n pem = pem + (c,)\n if tls_cert.certificate:\n pem = pem + (tls_cert.certificate,)\n if tls_cert.private_key:\n pem = pem + (tls_cert.private_key,)\n return \"\\n\".join(pem)", "def encode_certificate(self, cert):\n return cert.public_bytes(\n serialization.Encoding.PEM,\n ).decode(encoding='UTF-8')", "def get_cert_print_bytes(cert):\n #collect PEM bytes\n cert_bytes = cert.decode(\"utf-8\")\n cert_bytes += '\\n'\n\n #collect certificate text\n cert_bytes += certs_handler.get_cert_content(cert).decode(\"utf-8\")\n cert_bytes += '\\n'\n\n #contains both PEM and certificate text\n return cert_bytes", "def getCertificate(self, req):\n return dumpCert(createCert(parseCertReqStr(req), self._cert,\n self._key))", "def serialize_certificate(\n certificate: Certificate, encoding: serialization.Encoding\n) -> bytes:\n try:\n cert_bytes = certificate.public_bytes(encoding)\n except Exception as err:\n raise X509CertificateError(\n 'Could not get bytes from object: {}'.format(str(err))\n )\n\n return cert_bytes", "def pem_armor_certificate(certificate):\n\n return asymmetric.dump_certificate(certificate)", "def serializeParameters(parameters):\n\treturn parameters.parameter_bytes(\n\t\tencoding=serialization.Encoding.PEM,\n\t\tformat=serialization.ParameterFormat.PKCS3\n\t)", "def Certificate(self) -> _n_8_t_0:", "def Certificate(self) -> _n_8_t_0:", "def certificate_body(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"certificate_body\")", "def encode_csr(self, csr):\n return csr.public_bytes(\n serialization.Encoding.PEM,\n ).decode(encoding='UTF-8')", "def certificate_body(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"certificate_body\")", "def get_PEM(self):\n\n return self.get_POW().pemWrite()", "def serialize(self, data):\n assert self._key is not None\n assert self._cert is not None\n try:\n data = self._serialize(data)\n signature = b64encode(self._key.sign(data, self._digest))\n signer = self._cert.get_id()\n return self._serialize(dict(data=data,\n signer=signer,\n signature=signature))\n except Exception, exc:\n raise SecurityError(\"Unable to serialize: %r\" % (exc, ))", "def request_cert():\n\n api_request = shallow_copy(props)\n\n for key in ['ServiceToken', 'Region', 'Tags', 'Route53RoleArn']:\n api_request.pop(key, None)\n\n if 'ValidationMethod' in props:\n if props['ValidationMethod'] == 'DNS':\n\n # Check that we have all the hosted zone information we need to validate\n # before we create the certificate\n for name in set([props['DomainName']] + props.get('SubjectAlternativeNames', [])):\n get_zone_for(name)\n\n del api_request['DomainValidationOptions']\n\n e['PhysicalResourceId'] = acm.request_certificate(\n IdempotencyToken=i_token,\n **api_request\n )['CertificateArn']\n add_tags()", "def get_PEM(self):\n\n return self.get_POW().pemWritePrivate()", "def certificate(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"certificate\")", "def convert_key_to_pem ( key_filename, output_filename ) :\n cmd = 'openssl rsa -in ' + key_filename + ' -outform PEM -out ' + output_filename\n return subprocess.call( cmd, shell = True )", "def certificate(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> pulumi.Output[str]:\n return pulumi.get(self, 
\"certificate\")", "def certificate(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"certificate\")", "def to_pem(self, encoding=\"pem\"):\n return public_to_pem(self, encoding)", "def make_cert_for_spki_request(spki_req_b64, serial, ident):\n spki_obj = netscape_spki_from_b64(spki_req_b64)\n if spki_obj is None:\n raise ValueError('Invalid SPKI object')\n\n root_crt = _try_load_ca_cert(cfg.ca_cert_path())\n root_key = _try_load_ca_private_key(cfg.ca_private_key_path())\n crt = _make_base_cert(spki_obj.get_pubkey(), 365, ident, serial)\n crt.set_issuer(root_crt.get_subject())\n crt.sign(root_key, 'sha256')\n return crypto.dump_certificate(crypto.FILETYPE_ASN1, crt)", "def pfx2pem(input_file, output_file, passphrase=None):\n pfx = open(input_file, 'rb').read()\n p12 = crypto.load_pkcs12(pfx, passphrase)\n pem = crypto.dump_certificate(crypto.FILETYPE_PEM, p12.get_certificate())\n pem += crypto.dump_privatekey(crypto.FILETYPE_PEM, p12.get_privatekey())\n open(output_file, 'wb').write(pem)", "def serialize(self) -> bytes:\n return json_dumps([req._to_dict() for req in self]).encode()", "def get_private_key_in_pem(self):\n serialized_private = self.private_key_obj.private_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PrivateFormat.TraditionalOpenSSL,\n encryption_algorithm=serialization.NoEncryption()\n )\n return serialized_private", "def pem(ctx):\n click.echo(_get_pem(ctx().source))", "def certificate_body(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate_body\")", "def pfx2pem_memmory(input_file):\n pfx = open(input_file, 'rb').read()\n p12 = crypto.load_pkcs12(pfx)\n pem = crypto.dump_certificate(crypto.FILETYPE_PEM, p12.get_certificate())\n pem += crypto.dump_privatekey(crypto.FILETYPE_PEM, p12.get_privatekey())\n return pem", "def certificate(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"certificate\")", "def get_PEM(self):\n\n return self.get_POW().pemWritePublic()", "def to_pem(self, encoding=\"pem\"):\n return self.publicArea.to_pem(encoding)", "def get_public_key_in_pem(self):\n serialized_public = self.public_key_obj.public_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PublicFormat.SubjectPublicKeyInfo\n )\n return serialized_public", "def createCertRequest(pkey, digest=\"sha256\", **name):\n req = crypto.X509Req()\n subj = req.get_subject()\n\n for key, value in name.items():\n setattr(subj, key, value)\n\n req.set_pubkey(pkey)\n req.sign(pkey, digest)\n return req", "def certificate(self) -> str:\n return pulumi.get(self, \"certificate\")", "def to_binary(self):\n c = containerize(exclude_fields(self))\n self.payload = MsgCertificateChain._parser.build(c)\n return self.pack()", "def certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate\")", "def load_pem_x509_certificate(data):\n return _x509.load_pem_x509_certificate(data, _backends.default_backend())", "def get_own_cert_as_string(self):\n certpath, cert, certstr = self.get_own_cert()\n return 
certstr", "def tls_certificate_chain_pem_path(tls_certificate):\n with tls_certificate.private_key_and_cert_chain_pem.tempfile() as cert_pem:\n yield cert_pem", "def read_pem(input):\n data = []\n state = 0\n for line in input.split('\\n'):\n if state == 0:\n if line.startswith('-----BEGIN'):\n state = 1\n elif state == 1:\n if line.startswith('-----END'):\n state = 2\n else:\n data.append(line)\n elif state == 2:\n break\n if state != 2:\n raise ValueError, 'No PEM encoded input found'\n data = ''.join(data)\n data = data.decode('base64')\n return data", "def cert_info(user, course):\r\n if not course.may_certify():\r\n return {}\r\n\r\n return _cert_info(user, course, certificate_status_for_student(user, course.id))", "def serializePrivateKey(private_key):\n\treturn private_key.private_bytes(\n\t\tencoding=serialization.Encoding.PEM,\n\t\tformat=serialization.PrivateFormat.PKCS8,\n\t\tencryption_algorithm=serialization.NoEncryption()\n\t)", "def raw(self) -> bytes:\n return bytes(self._signing_key)", "def encode_key(self, key):\n return key.private_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PrivateFormat.TraditionalOpenSSL,\n encryption_algorithm=serialization.NoEncryption(),\n ).decode(encoding='UTF-8')", "def to_bytes(self) -> bytes:\n proposal_info_in_dict = vars(self)\n proposal_info_in_dict[\"id\"] = bytes.hex(proposal_info_in_dict[\"id\"])\n proposal_info_in_dict[\"proposer\"] = str(proposal_info_in_dict[\"proposer\"])\n return json_dumps(proposal_info_in_dict).encode()", "def handle_pem_extension(oid, _input):\r\n try:\r\n cert = objects.X509(oid)\r\n cert.pem = _input.read()\r\n except (ValueError, TypeError, OSError) as failed_to_init:\r\n raise click.BadParameter(\r\n '[{0}]: File Content can\\'t be parsed or written.\\n {1}'.format(_input.name, _input.read())\r\n ) from failed_to_init", "def create_x509_req(privkey, subject_info):\n builder = x509.CertificateSigningRequestBuilder()\n builder = builder.subject_name(subject_info.get_name())\n builder = subject_info.install_extensions(builder)\n\n # create final request\n req = builder.sign(private_key=privkey, algorithm=SHA256(), backend=get_backend())\n return req", "def cert(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cert\")", "def cert(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cert\")", "def get_ssl_certificate():", "def json_bytes_signed(request) -> bytes:\n return get_test_data(request, __name__, \"config_signed.json\")", "def certificate_chain(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"certificate_chain\")", "def get_ssl_certificate() :", "def get_der(self):\n return OpenSSL.crypto.dump_certificate(\n OpenSSL.crypto.FILETYPE_ASN1, self._cert)", "def jwt_key_to_pem(self, key_json_dict):\n pub_key = RSAAlgorithm.from_jwk(json.dumps(key_json_dict))\n return pub_key.public_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PublicFormat.SubjectPublicKeyInfo,\n )", "def pem(b, name):\r\n s1 = b2a_base64(b)[:-1] # remove terminating \\n\r\n s2 = \"\"\r\n while s1:\r\n s2 += s1[:64] + \"\\n\"\r\n s1 = s1[64:]\r\n s = (\"-----BEGIN %s-----\\n\" % name) + s2 + \\\r\n (\"-----END %s-----\\n\" % name) \r\n return s", "def get_cert_content(certificate):\n cert_object = crypto.load_certificate(crypto.FILETYPE_PEM, certificate)\n cert_content = crypto.dump_certificate(crypto.FILETYPE_TEXT, cert_object)\n return cert_content", "def serialize(self):\n return json.dumps(self.request_data)", "def certificates(self) -> 
Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"certificates\")", "def certificates(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"certificates\")", "def serialize(self,writer: SerializationWriter) -> None:\n if not writer:\n raise TypeError(\"writer cannot be null.\")\n super().serialize(writer)\n writer.write_str_value(\"certificateData\", self.certificate_data)\n writer.write_str_value(\"developerId\", self.developer_id)\n writer.write_str_value(\"keyId\", self.key_id)\n writer.write_str_value(\"serviceId\", self.service_id)", "def to_binary(self):\n c = containerize(exclude_fields(self))\n self.payload = MsgCertificateChainDep._parser.build(c)\n return self.pack()", "def serialize(self):\n\n data = {}\n\n data[\"verified\"] = True\n\n return data", "def key_to_pem(key, password=None):\n if password:\n enc = BestAvailableEncryption(as_bytes(password))\n else:\n enc = NoEncryption()\n return key.private_bytes(Encoding.PEM, PrivateFormat.PKCS8, enc)", "def submit_mdmcert_request(email: str, csr_pem: str,\n encrypt_with_pem: str, api_key: str = MDMCERT_API_KEY) -> Dict:\n base64_csr = b64encode(csr_pem)\n base64_recipient = b64encode(encrypt_with_pem)\n\n mdmcert_dict = {\n 'csr': base64_csr.decode('utf8'),\n 'email': email,\n 'key': api_key,\n 'encrypt': base64_recipient.decode('utf8'),\n }\n\n res = requests.post(\n MDMCERT_REQ_URL,\n data=json.dumps(mdmcert_dict).encode('utf8'),\n headers={\n 'Content-Type': 'application/json',\n 'User-Agent': 'coMmanDMent/0.1',\n })\n\n return res.json()", "def serializePublicKey(public_key):\n\treturn public_key.public_bytes(\n\t\tencoding=serialization.Encoding.PEM,\n\t\tformat=serialization.PublicFormat.SubjectPublicKeyInfo\n\t)", "def get_own_cert_as_openssl_object(self):\n# _log.debug(\"get_own_cert_as_openssl_object: node_name={}\".format(self.node_name))\n certpath, cert, certstr = self.get_own_cert()\n return cert", "def as_text(self):\n buf=BIO.MemoryBuffer()\n m2.x509_crl_print(buf.bio_ptr(), self.crl)\n return buf.read_all()", "def add_certificate_subject(subject, spec):\n certificate_subject = {}\n # the template we're using to build Certificate resource doesn't\n # accept fields with None or empty values. 
We just add fields below\n # if they are specified by the user, otherwise we simply ignore them.\n if subject.get('O'):\n spec['organization'] = [subject.get('O')]\n if subject.get('CN'):\n spec['commonName'] = subject.get('CN')\n if subject.get('OU'):\n certificate_subject['organizationalUnits'] = [subject.get('OU')]\n if subject.get('C'):\n certificate_subject['countries'] = [subject.get('C')]\n if subject.get('ST'):\n certificate_subject['provinces'] = [subject.get('ST')]\n if subject.get('L'):\n certificate_subject['localities'] = [subject.get('L')]\n spec['subject'] = certificate_subject\n return spec", "def CreateCertificate(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"CreateCertificate\", params, headers=headers)\n response = json.loads(body)\n model = models.CreateCertificateResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))", "def __init__(__self__, *,\n certificate_body: pulumi.Input[str],\n private_key: pulumi.Input[str],\n certificate_chain: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n name_prefix: Optional[pulumi.Input[str]] = None,\n path: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):\n pulumi.set(__self__, \"certificate_body\", certificate_body)\n pulumi.set(__self__, \"private_key\", private_key)\n if certificate_chain is not None:\n pulumi.set(__self__, \"certificate_chain\", certificate_chain)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if name_prefix is not None:\n pulumi.set(__self__, \"name_prefix\", name_prefix)\n if path is not None:\n pulumi.set(__self__, \"path\", path)\n if tags is not None:\n pulumi.set(__self__, \"tags\", tags)", "def extract_ca_crt_bytes_from_pem(pem_content):\n begin_search = pem_content.find(constants.BEGIN_CERTIFICATE_MARKER)\n if begin_search < 0:\n raise exception.InvalidKubernetesCA\n\n end_search = pem_content.find(constants.END_CERTIFICATE_MARKER)\n if end_search < 0:\n raise exception.InvalidKubernetesCA\n\n end_search += len(constants.END_CERTIFICATE_MARKER)\n base64_crt = base64.encode_as_text(pem_content[begin_search:end_search])\n return base64_crt", "def json_bytes_signed_canonical(request) -> bytes:\n return get_test_data(request, __name__, \"config_signed_canonical.json\")", "def save(self, filename, format=FORMAT_PEM):\n bio = BIO.openfile(filename, 'wb')\n if format == FORMAT_PEM:\n return m2.x509_req_write_pem(bio.bio_ptr(), self.req)\n elif format == FORMAT_DER:\n return m2.i2d_x509_req_bio(bio.bio_ptr(), self.req)\n else:\n raise ValueError(\"Unknown filetype. 
Must be either FORMAT_DER or FORMAT_PEM\")", "def fusion_api_get_certificate_info(self, uri=None, api=None, param='', headers=None):\n param = '/certificates/https/'\n return self.ic.get(uri=uri, api=api, headers=headers, param=param)", "def get_pvk_pem_from_bytes(pvk: bytes) -> bytes:\n sk = ecdsa.SigningKey.from_string(pvk, curve=CURVE)\n\n return sk.to_pem()", "def certificates(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ServiceCertificateArgs']]]]:\n return pulumi.get(self, \"certificates\")", "def certificates(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ServiceCertificateArgs']]]]:\n return pulumi.get(self, \"certificates\")", "def to_binary(self):\n c = containerize(exclude_fields(self))\n self.payload = MsgEd25519CertificateDep._parser.build(c)\n return self.pack()", "def get_pem(bucket_name: str, bucket_key: str) -> bytes:\n s3 = boto3.resource(\"s3\")\n s3.Bucket(bucket_name).download_file(bucket_key, \"/tmp/key.pem\")\n with open(\"/tmp/key.pem\", \"rb\") as f:\n return f.read()", "def _parse_certificate(cls, response):\n links = _parse_header_links(response)\n try:\n cert_chain_uri = links[u'up'][u'url']\n except KeyError:\n cert_chain_uri = None\n return (\n response.content()\n .addCallback(\n lambda body: messages.CertificateResource(\n uri=cls._maybe_location(response),\n cert_chain_uri=cert_chain_uri,\n body=body))\n )", "def certificate_chain(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate_chain\")", "def certificate_chain(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate_chain\")", "def to_json(self) -> Dict[str, Union[List[int], List[str]]]:\n fmt = serialization.PublicFormat.SubjectPublicKeyInfo\n obj: Dict[str, Union[List[int], List[str]]] = {}\n lst = []\n\n for pubkey in self.ringv2.values():\n try:\n pubbytes = pubkey.public_bytes(encoding=serialization.Encoding.DER, format=fmt)\n lst.append(pubbytes)\n except Exception as ex:\n logger.error(\"Could not serialize key: %s\", str(ex))\n\n obj[\"pubkeys\"] = [base64.b64encode(pubkey).decode(\"ascii\") for pubkey in lst]\n obj[\"keyids\"] = list(self.ringv2.keys())\n return obj", "def to_binary(self):\n c = containerize(exclude_fields(self))\n self.payload = MsgEcdsaCertificate._parser.build(c)\n return self.pack()", "def encode_dict(data: dict) -> Any:\n version = data[\"version\"] or DEFAULT_PROTO_VERSION\n operation = data[\"operation\"]\n request_id = data.get(\"request-id\", None)\n if request_id is None:\n request_id = random.choice(range(10000, 99999))\n\n encoded = struct.pack(\">bb\", *version)\n encoded += struct.pack(\">h\", operation.value)\n encoded += struct.pack(\">i\", request_id)\n\n encoded += struct.pack(\">b\", IppTag.OPERATION.value)\n\n if isinstance(data.get(\"operation-attributes-tag\", None), dict):\n for attr, value in data[\"operation-attributes-tag\"].items():\n encoded += construct_attribute(attr, value)\n\n if isinstance(data.get(\"job-attributes-tag\", None), dict):\n encoded += struct.pack(\">b\", IppTag.JOB.value)\n\n for attr, value in data[\"job-attributes-tag\"].items():\n encoded += construct_attribute(attr, value)\n\n if isinstance(data.get(\"printer-attributes-tag\", None), dict):\n encoded += struct.pack(\">b\", IppTag.PRINTER.value)\n\n for attr, value in data[\"printer-attributes-tag\"].items():\n encoded += construct_attribute(attr, value)\n\n encoded += struct.pack(\">b\", IppTag.END.value)\n\n return encoded", "def serialize_request(self, request, headers):\n content = self._write_content(request)\n 
headers.update({'Content-Length': str(len(content))})\n return content", "def _sign_cert(self, cert):\n with open(self._get_key_link(self.commonname), 'r') as private_file:\n data = private_file.read()\n pkey = crypto.load_privatekey(crypto.FILETYPE_PEM,\n data)\n cert.sign(pkey, 'sha256')", "def claim_req_json(self) -> str:\n\n return self._claim_req_json", "def sign_certificate_request(csr, rootkey, rootcrt, client_key, domain_name, notBefore, notAfter):\n\n serial_number = int(str(uuid.uuid4().int)[:20])\n crt = x509.CertificateBuilder().subject_name(\n csr.subject\n ).issuer_name(\n rootcrt.subject\n ).public_key(\n csr.public_key()\n ).serial_number(\n serial_number # pylint: disable=no-member\n ).not_valid_before(\n notBefore\n ).not_valid_after(\n notAfter\n ).add_extension(\n extension=x509.KeyUsage(\n digital_signature=True, key_encipherment=True, content_commitment=True,\n data_encipherment=False, key_agreement=False, encipher_only=False, decipher_only=False, key_cert_sign=False, crl_sign=False\n ),\n critical=True\n ).add_extension(\n extension=x509.BasicConstraints(ca=False, path_length=None),\n critical=True\n ).add_extension(\n extension=x509.AuthorityKeyIdentifier.from_issuer_public_key(rootkey.public_key()),\n critical=False\n ).add_extension(\n csr.extensions.get_extension_for_oid(ExtensionOID.SUBJECT_ALTERNATIVE_NAME).value,\n critical=False,\n ).sign(\n private_key=rootkey,\n algorithm=hashes.SHA256(),\n backend=default_backend()\n )\n\n ##storing client's .crt\n with open(domain_name + \".crt\", 'wb') as f:\n f.write(crt.public_bytes(encoding=serialization.Encoding.PEM))", "def sign_certificate(csr):\n unique_filename = str(uuid.uuid4().hex)\n\n file = open(\"./csr_req/%s.csr\" % unique_filename, \"w\")\n file.write(csr.decode(\"utf-8\"))\n file.close()\n\n subprocess.run([\"../ca/scripts/sign.sh\", unique_filename], check=False)\n\n file = open(\"./csr_req/%s.p7b\" % unique_filename, \"r\")\n cert = file.read()\n\n os.remove(\"./csr_req/%s.csr\" % unique_filename)\n os.remove(\"./csr_req/%s.p7b\" % unique_filename)\n\n return cert", "def ModifyCertificate(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"ModifyCertificate\", params, headers=headers)\n response = json.loads(body)\n model = models.ModifyCertificateResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))" ]
[ "0.6295511", "0.6238403", "0.6178628", "0.6023338", "0.596094", "0.5930526", "0.5825464", "0.5816554", "0.57833594", "0.57833594", "0.5726823", "0.571607", "0.56658447", "0.5638736", "0.56148493", "0.55883175", "0.5558911", "0.54970115", "0.54970115", "0.54914117", "0.5478445", "0.5478445", "0.5478445", "0.5471324", "0.5452907", "0.5419648", "0.5403536", "0.5362289", "0.5354722", "0.53500247", "0.5341417", "0.5335588", "0.5330579", "0.5328324", "0.5306214", "0.5305468", "0.5305346", "0.5258497", "0.5254093", "0.5254093", "0.5254093", "0.5254093", "0.5254093", "0.5254093", "0.5179875", "0.517227", "0.516662", "0.5165338", "0.51624066", "0.5143595", "0.51132584", "0.50885767", "0.5083978", "0.50739086", "0.5065724", "0.5062076", "0.5062076", "0.5050172", "0.5042384", "0.50379276", "0.5016861", "0.49918902", "0.49779847", "0.49763596", "0.49375772", "0.49348632", "0.4901991", "0.4901991", "0.48995924", "0.48891294", "0.4888613", "0.48762196", "0.48646146", "0.4850204", "0.4848903", "0.4842646", "0.483841", "0.4827633", "0.482262", "0.48031527", "0.480303", "0.47990552", "0.47983557", "0.4796414", "0.47920522", "0.47920522", "0.4787686", "0.47807178", "0.47800383", "0.47743043", "0.47743043", "0.47702295", "0.47477955", "0.47471878", "0.47290906", "0.47213274", "0.47123972", "0.47118494", "0.4710893", "0.4700379" ]
0.7018637
0
Read private key, decrypt if needed.
def load_key(fn, psw=None):
    if not fn:
        die("Need private key")
    if psw:
        psw = as_bytes(psw)
    data = load_gpg_file(fn)
    key = load_pem_private_key(data, password=psw, backend=get_backend())
    return key
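# Usage sketch, assuming `die`, `as_bytes`, `load_gpg_file`, and `get_backend`
# are helpers defined elsewhere in the source project, and `load_pem_private_key`
# is cryptography.hazmat.primitives.serialization.load_pem_private_key;
# the filename and passphrase below are hypothetical:
#   key = load_key("signing_key.pem.gpg", psw="passphrase")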
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def decrypt(data, private_key):\r\n\r\n # Retrieve session key, tag, ciphertext and nonce from file\r\n enc_session_key, nonce, tag, ciphertext = \\\r\n [ file_in.read(x) for x in (private_key.size_in_bytes(), 16, 16, -1) ]\r\n\r\n\r\n # Decrypt the session key\r\n session_key = cipher_rsa.decrypt(enc_session_key)\r\n\r\n # Decrypt the data with the AES session key\r\n cipher_aes = AES.new(session_key, AES.MODE_EAX, nonce)\r\n data = cipher_aes.decrypt_and_verify(ciphertext, tag)\r\n\r\n return data", "def rsa_decrypt(cypher, privatekey):\r\n \r\n # A key object is created to interact with the PyCrypto\r\n # encryption suite. The object contains key data and\r\n # the necessary rsa functions.\r\n temp_key_obj = _rsa_keydict_to_keyobj(privatekey = privatekey) \r\n \r\n return _rsa_gluechops(cypher, temp_key_obj, temp_key_obj.decrypt)", "def decrypt(self, key, device, private_key):\n device_key = base64.b64decode(self.keys[device.id.hex])\n\n master_key = private_key_decrypt(private_key, device_key)\n\n if master_key is None:\n return\n\n return fernet_decrypt(self.values[key], master_key, self.salt)", "def decrypt(self, cypher):\n\n if self.crypt_private == \"\":\n raise ValueError(\"Error decrypting: No private encryption key found for {}\".format(self))\n\n key_private = RsaPrivateKey.Read(self.crypt_private)\n return key_private.Decrypt(cypher)", "def get_private_key():\n if not os.path.exists(_private_key_path):\n return None\n\n try:\n with open(_private_key_path) as secret_file:\n return secret_file.read()\n\n except Exception as exc:\n log.error(f'Could not read private key.\\n{exc}')\n traceback.print_exc(file=sys.stderr)", "def decrypt_using_private_key(message):\n public_key_path = os.path.join('keys', 'private.key')\n with open(public_key_path, 'rb') as file:\n private_key = RSA.importKey(file.read())\n\n cipher = PKCS1_OAEP.new(private_key)\n encrypted = cipher.decrypt(message)\n return encrypted.hex()", "def decrypt(data, key):\n data = six.ensure_binary(data)\n try:\n data = privy.peek(hidden=data, password=key)\n except ValueError:\n error = \"Unable to decrypt {cnt} bytes of data using key {k}, invalid key!\"\n error = error.format(cnt=len(data), k=key)\n raise exceptions.ModuleError(error)\n return six.ensure_text(data)", "def _get_decryption_key(self, **options):\n\n return self._private_key", "def test_get_private_key(self):\n\n expected = self.pem_private_key\n\n encryptor = DataEncryption()\n encryptor.set_private_key(self.pem_private_key.decode())\n\n actual = encryptor.get_private_key()\n\n self.assertEqual(expected, actual)", "def rsa_decrypt(data, rsa_priv_key_str):\r\n key = RSA.importKey(rsa_priv_key_str)\r\n cipher = PKCS1_OAEP.new(key)\r\n return cipher.decrypt(data)", "def load_private_key(self, private_key):\n if not self.curve:\n self.curve = private_key.curve\n if self.curve != private_key.curve:\n raise InvalidCurveError(\"Curve mismatch.\")\n self.private_key = private_key\n return self.private_key.get_verifying_key()", "def decrypt(self, cypher):\n\n cypher = b64decode(cypher)\n key_private = RsaPrivateKey.Read(self.crypt_private)\n return key_private.Decrypt(cypher)", "def _decrypt_pvtkey(self, pvtkey_file: str, passphrase: str) -> str:\n\n keydata: str = None\n if pvtkey_file:\n try:\n keydata = asyncssh.public_key.read_private_key(pvtkey_file,\n passphrase)\n except Exception as e:\n self.logger.error(\n f\"ERROR: Unable to read private key file {pvtkey_file}\"\n f\"for jump host due to {str(e)}\")\n\n return keydata", "def load_private(file):\n 
with open(file, \"rb\") as pemfile:\n key = jwk.JWK.from_pem(pemfile.read())\n\n logging.info('Loaded private key from {}'.format(file))\n return key", "def decrypt(path, key):\n key = load_key(key)\n\n if p.isdir(path):\n # encrypt a directory\n return decrypt_dir(path, key)\n # decrypt a file\n path = decrypt_file(path, key)\n # check if file contains suffix\n if \"-encrypted.zip\" in path:\n return decrypt_dir(path, key)\n return", "def decrypt(cipher):\n setup()\n\n # Read in p, q, and d from the private key file\n ifp = open(\"private.rsa\")\n private = ifp.readlines()\n d = int(private[-1])\n\n # Read in n from the public key file\n ifp = open(\"public.rsa\")\n public = ifp.readlines()\n n = int(public[-1])\n\n # Compute c^d(mod n)\n m = str(pow(long(cipher), d, n))\n\n # Convert back to alphabets\n if len(m) % 2:\n m = '0' + m\n plaintext = ''\n for i in range(0, len(m), 2):\n plaintext += chr(int(m[i:i+2]) - 1 + ord('a'))\n\n return plaintext", "def load_private_key_bytes(self, private_key):\n if not self.curve:\n raise NoCurveError(\"Curve must be set prior to key load.\")\n return self.load_private_key(\n SigningKey.from_string(private_key, curve=self.curve))", "def decrypt_pk(priv_key, ciphertext):\n try:\n plaintext = priv_key.decrypt(\n b64decode(ciphertext),\n padding.OAEP(\n mgf=padding.MGF1(algorithm=CryptoHash()),\n algorithm=CryptoHash(),\n label=None\n )\n )\n except UnsupportedAlgorithm as e:\n # a failure to dencrypt someone else's data is not typically a fatal\n # error, but in this particular case, the most likely cause of this\n # error is an old cryptography library\n logging.error(\"Fatal error: encryption hash {} unsupported, try upgrading to cryptography >= 1.4. Exception: {}\".format(\n CryptoHash, e))\n # re-raise the exception for the caller to handle\n raise e\n return plaintext", "def decrypt(cypher, priv_key):\n\n if not isinstance(priv_key, key.PrivateKey):\n raise TypeError(\"You must use the private key with decrypt\")\n\n return gluechops(cypher, priv_key.d, priv_key.n, decrypt_int)", "def load_private_key(filename):\n\twith open(str(filename) + \"_key.pem\", \"rb\") as key_file:\n\t\treturn serialization.load_pem_private_key(\n\t\tkey_file.read(),\n\t\tpassword=None,\n\t\tbackend=default_backend()\n\t)", "def _load_private_key(self, filename, keytype=None):\n type_map = {\n 'dsa': ssh.DSSKey,\n 'rsa': ssh.RSAKey}\n\n if keytype is None:\n with open(filename, 'rb') as k:\n keydata = k.read()\n \n m = re.search(\"BEGIN (.*?) 
PRIVATE KEY\", keydata)\n if m:\n keytype = m.group(1)\n\n keycls = type_map.get(keytype.lower(), 'dsa')\n\n try:\n key = keycls.from_private_key_file(filename)\n log.debug(\"Loaded key '%s' without password.\", filename)\n except ssh.PasswordRequiredException:\n passphrase = self.config.get('passphrase')\n \n if callable(passphrase):\n passphrase = passphrase(filename,\n self.config.get('remote_host', 'localhost'),\n self.config.get('username', getpass.getuser()))\n if passphrase is None:\n return\n\n if not passphrase:\n passphrase = getpass.getpass(\"Key passphrase: \")\n \n key = keycls.from_private_key_file(filename, passphrase)\n\n return key", "def __decryptRSA(msg, user):\n # Load user's private key\n try:\n with open(\"%s/%s/keys/privateKey.pem\" % (USERS, user), \"rb\") as f:\n privateKey = serialization.load_pem_private_key(\n f.read(),\n password=None,\n backend=default_backend()\n )\n f.close()\n except:\n print(\"Error opening user's private key\")\n print(sys.exc_info())\n return None\n \n # Decrypt message\n return privateKey.decrypt(\n msg, \n padding.OAEP(\n mgf=padding.MGF1(algorithm=hashes.SHA256()),\n algorithm=hashes.SHA256(),\n label=None\n )\n )", "def test_set_private_key(self) -> None:\n\n expected = self.pem_private_key.decode()\n\n encryptor = DataEncryption(private_key=self.pem_private_key)\n\n # pylint: disable=protected-access\n actual = encryptor._loaded_private_key.private_bytes(\n serialization.Encoding.PEM,\n serialization.PrivateFormat.PKCS8,\n serialization.NoEncryption(),\n ).decode()\n\n self.assertEqual(expected, actual)", "def load_private_key_pem(self, private_key_pem):\n return self.load_private_key(SigningKey.from_pem(private_key_pem))", "def load_private_key_der(self, private_key_der):\n return self.load_private_key(SigningKey.from_der(private_key_der))", "def decrypt(private_key, ciphertext):\n if len(ciphertext) < 512 + 16:\n return None\n msg_header = ciphertext[:512]\n msg_iv = ciphertext[512:512+16]\n msg_body = ciphertext[512+16:]\n try:\n symmetric_key = PKCS1_OAEP.new(private_key).decrypt(msg_header)\n except ValueError:\n return None\n if len(symmetric_key) != 32:\n return None\n return AES.new(symmetric_key,\n mode=AES.MODE_CFB,\n IV=msg_iv).decrypt(msg_body)", "def parse_private(data, password=None):\n\n if not isinstance(data, byte_cls):\n raise TypeError(pretty_message(\n '''\n data must be a byte string, not %s\n ''',\n type_name(data)\n ))\n\n if password is not None:\n if not isinstance(password, byte_cls):\n raise TypeError(pretty_message(\n '''\n password must be a byte string, not %s\n ''',\n type_name(password)\n ))\n else:\n password = b''\n\n # Appears to be PEM formatted\n if re.match(b'\\\\s*-----', data) is not None:\n key_type, _, data = _unarmor_pem(data, password)\n\n if key_type == 'public key':\n raise ValueError(pretty_message(\n '''\n The data specified does not appear to be a private key, but\n rather a public key\n '''\n ))\n\n if key_type == 'certificate':\n raise ValueError(pretty_message(\n '''\n The data specified does not appear to be a private key, but\n rather a certificate\n '''\n ))\n\n try:\n pki = PrivateKeyInfo.load(data)\n # Call .native to fully parse since asn1crypto is lazy\n pki.native\n return pki\n except (ValueError):\n pass # Data was not PrivateKeyInfo\n\n try:\n parsed_wrapper = EncryptedPrivateKeyInfo.load(data)\n encryption_algorithm_info = parsed_wrapper['encryption_algorithm']\n encrypted_data = parsed_wrapper['encrypted_data'].native\n decrypted_data = 
_decrypt_encrypted_data(encryption_algorithm_info, encrypted_data, password)\n pki = PrivateKeyInfo.load(decrypted_data)\n # Call .native to fully parse since asn1crypto is lazy\n pki.native\n return pki\n except (ValueError):\n pass # Data was not EncryptedPrivateKeyInfo\n\n try:\n parsed = RSAPrivateKey.load(data)\n # Call .native to fully parse since asn1crypto is lazy\n parsed.native\n return PrivateKeyInfo.wrap(parsed, 'rsa')\n except (ValueError):\n pass # Data was not an RSAPrivateKey\n\n try:\n parsed = DSAPrivateKey.load(data)\n # Call .native to fully parse since asn1crypto is lazy\n parsed.native\n return PrivateKeyInfo.wrap(parsed, 'dsa')\n except (ValueError):\n pass # Data was not a DSAPrivateKey\n\n try:\n parsed = ECPrivateKey.load(data)\n # Call .native to fully parse since asn1crypto is lazy\n parsed.native\n return PrivateKeyInfo.wrap(parsed, 'ec')\n except (ValueError):\n pass # Data was not an ECPrivateKey\n\n raise ValueError(pretty_message(\n '''\n The data specified does not appear to be a known private key format\n '''\n ))", "def read_key(self, keyfile_name):\n\n with open(keyfile_name, 'rb') as f:\n self.key = f.read()\n self.cryptor = Fernet(self.key)", "def decrypt(priv_key, ciphertext):\n pk_encrypted_secret_key = ciphertext['pk_encrypted_secret_key']\n sym_encrypted_data = ciphertext['sym_encrypted_data']\n # TODO: secure delete\n secret_key = decrypt_pk(priv_key, pk_encrypted_secret_key)\n encoded_string = decrypt_symmetric(secret_key, sym_encrypted_data)\n return decode_data(encoded_string)", "def solve(key_data: bytes) -> PrivateKey:\n return { # type: ignore\n Encoding.PEM: load_pem_private_key,\n Encoding.DER: load_der_private_key\n }[real_encoding](key_data, password, default_backend())", "def decrypt(self, message):\n return self._keypair.decrypt(message)", "def get_private_key_in_pem(self):\n serialized_private = self.private_key_obj.private_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PrivateFormat.TraditionalOpenSSL,\n encryption_algorithm=serialization.NoEncryption()\n )\n return serialized_private", "def _wrap_privatekey(self) -> None:\n p_der = ffi.new(\"unsigned char **\")\n der_len = lib.i2d_PrivateKey(self._pkey, p_der)\n if der_len < 0:\n raise InvalidPKeyError(\"Could not serialize private key\")\n try:\n der = ffi.buffer(p_der[0], der_len)[:]\n try:\n self._key = load_der_private_key(der, password=None,\n backend=default_backend())\n except ValueError as exc:\n raise InvalidPKeyError from exc\n finally:\n lib.OPENSSL_free(p_der[0])", "def load_key():\n return open(\"Secret.key\",\"rb\").read()", "def rsa_decrypt(self, thing):\n return self.true_private_key.decrypt(\n thing,\n cryptography.hazmat.primitives.asymmetric.padding.OAEP(\n mgf=cryptography.hazmat.primitives.asymmetric.padding.MGF1(\n algorithm=hashes.SHA256()),\n algorithm=hashes.SHA256(),\n label=None\n )\n )", "def decrypt_file(self, input_file_name='', output_file_name=''):\n\n # Checking if input and output files selected right\n assert input_file_name and isfile(input_file_name), \"Input file wasn't selected!\"\n assert output_file_name, \"Output file wasn't selected!\"\n\n with open(output_file_name, 'wb') as output_file:\n # To iterate file as int values, I'm using generator\n input_file = self._open_file_longint(input_file_name)\n try:\n alpha = input_file.__next__()\n beta = input_file.__next__()\n except StopIteration:\n raise AssertionError(\"Input file is empty! 
Nothing to decrypt.\")\n\n x = self.keys['private']\n p = self.keys['public']['p']\n\n while alpha and beta:\n message_byte = bytes(chr((beta % p * (pow(alpha, (p - 1 - x), p))) % p), \"ascii\")\n output_file.write(message_byte)\n try:\n alpha = input_file.__next__()\n beta = input_file.__next__()\n except StopIteration:\n alpha = 0\n beta = 0\n return 1", "def decrypt(self, message):\n #check validity of _private_key\n if self._private_key is None:\n raise Exception(\"invalid private key\")\n\n output = \"\"\n\n d = self._private_key[0]\n n = self._private_key[1]\n\n for i in xrange(len(ciphertext)):\n m = pow(ciphertext[i], d, n)\n output += int_to_string(m)\n return output", "def decrypt(s):\n if s is None:\n return None\n else:\n # try:\n enc_value = ast.literal_eval(s)\n private_key = serialization.load_pem_private_key(\n pkey.encode('utf-8'),\n password=None,\n backend=default_backend()\n )\n\n dec = private_key.decrypt(\n enc_value,\n padding.OAEP(\n mgf=padding.MGF1(algorithm=hashes.SHA256()),\n algorithm=hashes.SHA256(),\n label=None\n )\n )\n return dec.decode()", "def Read(key):\n rsa = json.loads(key)\n pub = RsaPublicKey.Read(json.dumps(rsa['publicKey']))\n params = {'privateExponent': util.Decode(rsa['privateExponent']),\n 'primeP': util.Decode(rsa['primeP']),\n 'primeQ': util.Decode(rsa['primeQ']),\n 'primeExponentP': util.Decode(rsa['primeExponentP']),\n 'primeExponentQ': util.Decode(rsa['primeExponentQ']),\n 'crtCoefficient': util.Decode(rsa['crtCoefficient'])\n }\n\n key = RSA.construct((util.BytesToLong(pub.params['modulus']),\n util.BytesToLong(pub.params['publicExponent']),\n util.BytesToLong(params['privateExponent']),\n util.BytesToLong(params['primeQ']),\n util.BytesToLong(params['primeP']),\n util.BytesToLong(params['crtCoefficient'])))\n return RsaPrivateKey(params, pub, key, rsa['size'])", "def decryptor(file_name, key):\n\twith open(file_name, 'rb') as dfile:\n\t\tciphertext = dfile.read()\n\t\tdec = decrypt(key, ciphertext)\n\t\tdfile.close()\n\t\tdtext = \"The encrypted file was opened by macupdate.py by the user: \"\n\t\tcreateLog(dtext, 'logs/macupdate.log')\n\t\treturn dec", "def rsa_file_to_privatekey(filename):\r\n fileobject = file(filename,'r')\r\n privatekeystring = fileobject.read()\r\n fileobject.close()\r\n\r\n return rsa_string_to_privatekey(privatekeystring)", "def _get_private_key(self, privkey=None):\n\n # read private keys from keyring\n privkeys = self.gpg.list_keys(True) # True => private keys\n if len(privkeys) > 0 and privkeys[-1].has_key('fingerprint'):\n fingerprints = []\n for k in privkeys:\n fingerprints.append(k['fingerprint'])\n else:\n # no private key in keyring\n return None\n\n if privkey:\n # check for existence of private key received as argument\n # DEVEL: check for expiration as well\n if len(privkey) > 7 and len(privkey) <= 40:\n for fp in fingerprints:\n if fp.endswith(privkey):\n # work with last 16 significant chars internally,\n # even if only 8 are required in trac.ini\n privkey = fp[-16:]\n break\n # no fingerprint matching key ID\n else:\n privkey = None\n else:\n # reset invalid key ID\n privkey = None\n else:\n # select (last) private key from keyring\n privkey = fingerprints[-1][-16:]\n\n return privkey", "def _try_load_ca_private_key(path):\n pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, open(path, 'rb').read())\n if pkey.bits() < 2048:\n raise ValueError(\"I'm sorry Dave, I can't let you use a small \"\n \"RSA key.\")\n pkey.check()\n return pkey", "def get_private_key(self):\n# _log.debug(\"get_private_key: 
node_name={}\".format(self.node_name))\n with open(os.path.join(self.runtime_dir, \"private\", \"private.key\"), 'rb') as f:\n return f.read()", "def private_key(self):\n if self._private_key is not None:\n return self._private_key[0]\n\n spk = self.serialized_private_key\n passphrase = self.passphrase\n\n try:\n self._private_key = [\n serialization.load_pem_private_key(\n self.serialized_private_key,\n backend=default_backend(),\n password=self.passphrase)]\n\n return self._private_key[0]\n\n except:\n raise\n self._private_key = [None]\n return self._private_key[0]", "def decrypt(crypto, priv):\r\n string = rsa.encrypt(crypto, priv)\r\n string = livingDead.utfE(crypto)\r\n return crypto", "def load_key():\n return open(\"secret.key\", \"rb\").read()", "def _serialize_private_key(private_key, password=None):\n error = None\n pvt_key_loaders = [\n load_pem_private_key, load_der_private_key\n ]\n pvt_key = None\n for loader in pvt_key_loaders:\n if not pvt_key:\n try:\n pvt_key = loader(\n private_key.encode('utf-8'),\n password=password,\n backend=default_backend()\n )\n error = False\n break\n except (ValueError, UnsupportedAlgorithm) as err:\n error = err\n if error:\n raise errors.InvalidPrivateKeyError(error)\n else:\n return pvt_key", "def decrypt(self, key, data, mode, padding):\n # pylint: disable=unused-argument,no-self-use\n if hasattr(key, \"public_bytes\"):\n raise NotImplementedError('\"decrypt\" is not supported by public keys')\n try:\n return key.decrypt(data, padding.build())\n except Exception:\n error_message = \"Decryption failed\"\n _LOGGER.exception(error_message)\n raise DecryptionError(error_message)", "def deserializePrivateKey(string, bc = backend):\n\tif type(string) == str:\n\t\tstring = string.encode('utf8')\n\treturn serialization.load_pem_private_key(string, password = None , backend = bc)", "async def retrieve_private_key(self) -> Tuple[str, str]:\n\n filename, file_path = random.choice(self._private_keys)\n async with aiofiles.open(file_path, mode='r') as file:\n private_key = await file.read()\n return private_key, self._create_public_key_identifier(filename)", "def parsePrivateKey(s):\r\n return parsePEMKey(s, private=True)", "def decrypt(key, cipher, plaintext):\n\n rsa = Rsa()\n\n try:\n k = TomlKeyFormatter().from_string(key.read())\n\n c = cipher.read()\n p = rsa.decrypt(c, k)\n\n plaintext.write(p)\n\n except KeyFormatError:\n click.echo(\"ERROR: Key is in bad format\")\n\n except DecryptError:\n click.echo(\"ERROR: Key is wrong or message was badly padded before encryption\")", "def decrypt(self,message, key):\n return self.translateMessage(message, key, \"decrypt\")", "def get(self, key_name: str, password: str = None) -> PrivateKey:\n pass", "def decrypt_key(data, key):\n data = MegaCrypto.base64_decode(data)\n return sum((MegaCrypto.str_to_a32(MegaCrypto.cbc_decrypt(data[_i:_i + 16], key))\n for _i in range(0, len(data), 16)), ())", "def read_private_key_file(pkey_file,\n pkey_password=None,\n key_type=None,\n logger=None):\n ssh_pkey = None\n key_types = (paramiko.RSAKey, paramiko.DSSKey, paramiko.ECDSAKey)\n if hasattr(paramiko, 'Ed25519Key'):\n # NOQA: new in paramiko>=2.2: http://docs.paramiko.org/en/stable/api/keys.html#module-paramiko.ed25519key\n key_types += (paramiko.Ed25519Key, )\n for pkey_class in (key_type,) if key_type else key_types:\n try:\n ssh_pkey = pkey_class.from_private_key_file(\n pkey_file,\n password=pkey_password\n )\n if logger:\n logger.debug('Private key file ({0}, {1}) successfully '\n 'loaded'.format(pkey_file, 
pkey_class))\n break\n except paramiko.PasswordRequiredException:\n if logger:\n logger.error('Password is required for key {0}'\n .format(pkey_file))\n break\n except paramiko.SSHException:\n if logger:\n logger.debug('Private key file ({0}) could not be loaded '\n 'as type {1} or bad password'\n .format(pkey_file, pkey_class))\n return ssh_pkey", "def Read(key):\n rsa = json.loads(key)\n pub = RsaPublicKey.Read(json.dumps(rsa['publicKey']))\n params = {\n 'privateExponent': util.Base64WSDecode(rsa['privateExponent']),\n 'primeP': util.Base64WSDecode(rsa['primeP']),\n 'primeQ': util.Base64WSDecode(rsa['primeQ']),\n 'primeExponentP': util.Base64WSDecode(rsa['primeExponentP']),\n 'primeExponentQ': util.Base64WSDecode(rsa['primeExponentQ']),\n 'crtCoefficient': util.Base64WSDecode(rsa['crtCoefficient'])\n }\n\n key = RSA.construct((util.BytesToLong(pub.params['modulus']),\n util.BytesToLong(pub.params['publicExponent']),\n util.BytesToLong(params['privateExponent']),\n util.BytesToLong(params['primeQ']),\n util.BytesToLong(params['primeP']),\n util.BytesToLong(params['crtCoefficient'])))\n return RsaPrivateKey(params, pub, key, rsa['size'])", "def get_private_key_in_der(self):\n serialized_private = self.private_key_obj.private_bytes(\n encoding=serialization.Encoding.DER,\n format=serialization.PrivateFormat.TraditionalOpenSSL,\n encryption_algorithm=serialization.NoEncryption()\n )\n return serialized_private", "def passwd_decryption(self):\n with open(self.key_path, 'rb') as input_key:\n for line in input_key:\n key = line\n with open(self.pass_path, 'rb') as input_password:\n for line in input_password:\n password = line\n cipher_suit = Fernet(key)\n plain_password = cipher_suit.decrypt(password)\n plain_password = bytes(plain_password).decode('utf-8')\n \n return plain_password", "def _unwrap_private_key_info(key_info):\n\n key_alg = key_info.algorithm\n\n if key_alg == 'rsa' or key_alg == 'rsassa_pss':\n return key_info['private_key'].parsed\n\n if key_alg == 'dsa':\n params = key_info['private_key_algorithm']['parameters']\n parsed = key_info['private_key'].parsed\n return DSAPrivateKey({\n 'version': 0,\n 'p': params['p'],\n 'q': params['q'],\n 'g': params['g'],\n 'public_key': Integer(pow(\n params['g'].native,\n parsed.native,\n params['p'].native\n )),\n 'private_key': parsed,\n })\n\n if key_alg == 'ec':\n parsed = key_info['private_key'].parsed\n parsed['parameters'] = key_info['private_key_algorithm']['parameters']\n return parsed\n\n raise ValueError('Unsupported key_info.algorithm \"%s\"' % key_info.algorithm)", "def load_key():\n return open(\"pass.key\", \"rb\").read()", "def get_key_from_keyring(self):\n private_key = keyring.get_password(self.keyring_service_name, \"private_key\")\n\n if private_key is not None:\n return base64.b64decode(private_key)\n else:\n return None", "def private_key(self):\n return PrivateKey(self._sk.private_bytes(\n encoding=serialization.Encoding.Raw,\n format=serialization.PrivateFormat.Raw,\n encryption_algorithm=serialization.NoEncryption()))", "def read_keypair(priv_key_file, public_key_file):\n key_pair = {}\n with open(priv_key_file) as f:\n key_data = f.read()\n f.close()\n key_pair[\"key\"] = key_data\n with open(public_key_file) as f:\n pub_data = f.read()\n f.close()\n key_pair[\"pub\"] = pub_data\n for i in [priv_key_file, public_key_file]:\n os.remove(i)\n return key_pair", "def _decrypt(self):\n self._outfile = os.path.join(self.dest, self.plain_file)\n self._infile = self.encrypted_file\n self._log.info(\"Decrypting file '%s' to 
'%s'\", self.encrypted_file, self._outfile)\n with open(self.encrypted_file, \"rb\") as enc_file:\n openssl(\n \"enc\",\n \"-aes-256-cbc\",\n \"-d\",\n \"-pass\",\n \"file:{secret}\".format(secret=self.secret.keyfile),\n _in=enc_file,\n _out=self._outfile,\n )\n self._log.info(\"File '%s' decrypted to '%s'\", self.encrypted_file, self._outfile)\n return True", "def readKey(self, keyPath):\n\t\ttry:", "def decrypt(ctx, input, output):\n gpg_key = _get_gpg_key(_get_pem(ctx().source), ctx().user, ctx().verbose)\n _run_gpg_with_key(gpg_key, [\n '--decrypt', '--recipient',\n ctx().user, '--trust-model', 'always', '--armor'\n ], input, output, ctx().verbose)", "def decrypt(enc_data=None, pk=None, sk=None, pairing_group=None, debug=0):\n\n # Check if enc_data is set\n if enc_data is None:\n logging.error('decrypt_seed_key ciphertext exception')\n if debug: # ONLY USE FOR DEBUG\n print('EXCEPTION in decrypt_seed_key ciphertext')\n raise Exception\n\n # Check if pk is set and it exists\n if pk is None:\n logging.error('[ERROR] decrypt_seed_key pk_file exception')\n if debug: # ONLY USE FOR DEBUG\n print('EXCEPTION in decrypt_seed_key pk_file')\n raise Exception\n\n # Check if sk is set and it exists\n if sk is None:\n logging.error('decrypt_seed_key sk_file exception')\n if debug: # ONLY USE FOR DEBUG\n print('EXCEPTION in decrypt_seed_key sk_file')\n raise Exception\n\n # Decrypt data with CP-ABE and return the result\n cpabe = CPabe_BSW07(pairing_group)\n return cpabe.decrypt(pk, sk, enc_data)", "def decrypt_reader(in_path, password, key_length=32):\n with open(in_path, 'rb') as in_file:\n password = str.encode(password)\n block_size = AES.block_size\n salt = in_file.read(block_size)#[len(b'Salted__'):]\n key, i_v = derive_key_and_iv(password, salt, key_length, block_size)\n cipher = AES.new(key, AES.MODE_CBC, i_v)\n next_chunk = b''\n finished = False\n csv_bytes = b''\n while not finished:\n chunk = next_chunk\n next_chunk = cipher.decrypt(in_file.read(1024 * block_size))\n if len(next_chunk) == 0:\n padding_length = chunk[-1]\n if padding_length < 1 or padding_length > block_size:\n raise ValueError(\"Password incorrect\")\n chunk = chunk[:-padding_length]\n finished = True\n csv_bytes += chunk\n newline_pos = csv_bytes.find(b\"\\r\\n\")\n while newline_pos != -1:\n try:\n yield bytes_to_csv(csv_bytes[:(newline_pos + 2)])[0]\n except:\n raise ValueError(\"Password incorrect\")\n csv_bytes = csv_bytes[(newline_pos + 2):]\n newline_pos = csv_bytes.find(b\"\\r\\n\")", "def Private(self):\n self.Send(self.EncryptString('private\\n'))\n print self.DecryptString(self.Recv(4096))\n print self.DecryptString(self.Recv(4096))", "def do_ios_decryption(self):\r\n try:\r\n self.aes_decryption_key = self.extract_aes_key()\r\n except DecryptionKeyInvalidError:\r\n self.aes_decryption_key = self.get_backup_encryption_key()\r\n self.used_ios_decryption_key_cache = True\r\n \r\n self.decrypt_device_file()\r\n # join is optimized and does not cause O(n^2) total memory copies.\r\n self.decrypted_file = b\"\\n\".join(self.good_lines)", "def decrypt_epic(aes_key, encrypted_data):\n # Decode encrypted string\n decoded = base64.b64decode(encrypted_data)\n\n # Decrypt decoded string\n decoded_readable = CryptDecrypt(aes_key, decoded).decode('utf-8')\n return decoded_readable", "def decrypt_epic(aes_key, encrypted_data):\n # Decode encrypted string\n decoded = base64.b64decode(encrypted_data)\n\n # Decrypt decoded string\n decoded_readable = CryptDecrypt(aes_key, decoded).decode('utf-8')\n return decoded_readable", 
"def load_private_key(file_path: str, password: bytes = None,\n encoding: Encoding = None) -> PrivateKey:\n real_encoding = encoding or _get_encoding_type(file_path)\n\n def solve(key_data: bytes) -> PrivateKey:\n \"\"\"Determine the type of data and perform loading based on data type.\n\n :param key_data: given private keys data\n :return: loaded private key\n \"\"\"\n return { # type: ignore\n Encoding.PEM: load_pem_private_key,\n Encoding.DER: load_der_private_key\n }[real_encoding](key_data, password, default_backend())\n\n return generic_load(file_path, solve)", "def test_set_private_key_setter(self) -> None:\n\n expected = self.pem_private_key.decode()\n\n encryptor = DataEncryption()\n encryptor.set_private_key(self.pem_private_key)\n\n # pylint: disable=protected-access\n actual = encryptor._loaded_private_key.private_bytes(\n serialization.Encoding.PEM,\n serialization.PrivateFormat.PKCS8,\n serialization.NoEncryption(),\n ).decode()\n\n self.assertEqual(expected, actual)", "def decrypt(self, data):", "def decrypt(project_id, location_id, key_ring_id, crypto_key_id,\n ciphertext_file_name, plaintext_file_name):\n\n # Creates an API client for the KMS API.\n kms_client = googleapiclient.discovery.build('cloudkms', 'v1')\n\n # The resource name of the CryptoKey.\n name = 'projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}'.format(\n project_id, location_id, key_ring_id, crypto_key_id)\n\n # Read encrypted data from the input file.\n with io.open(ciphertext_file_name, 'rb') as ciphertext_file:\n ciphertext = ciphertext_file.read()\n\n # Use the KMS API to decrypt the data.\n crypto_keys = kms_client.projects().locations().keyRings().cryptoKeys()\n request = crypto_keys.decrypt(\n name=name,\n body={'ciphertext': base64.b64encode(ciphertext).decode('ascii')})\n response = request.execute()\n plaintext = base64.b64decode(response['plaintext'].encode('ascii'))\n\n # Write the decrypted data to a file.\n with io.open(plaintext_file_name, 'wb') as plaintext_file:\n plaintext_file.write(plaintext)\n\n print('Saved plaintext to {}.'.format(plaintext_file_name))", "def decrypt(self, key):\n super(MACDataUplinkMessage, self).decrypt(key, dir=0)", "def get_pem():\n try:\n with open('encrypted_pem.txt', 'r') as encrypted_pem:\n pem_file = encrypted_pem.read()\n\n kms = boto3.client('kms', region_name=REGION)\n return kms.decrypt(CiphertextBlob=b64decode(pem_file))['Plaintext']\n except (IOError, ClientError, KeyError) as err:\n LOGGER.error(err)\n return False", "def parsePEM(s, passwordCallback=None):\r\n\r\n if pemSniff(s, \"PRIVATE KEY\"):\r\n bytes = dePem(s, \"PRIVATE KEY\")\r\n return Python_RSAKey._parsePKCS8(bytes)\r\n elif pemSniff(s, \"RSA PRIVATE KEY\"):\r\n bytes = dePem(s, \"RSA PRIVATE KEY\")\r\n return Python_RSAKey._parseSSLeay(bytes)\r\n else:\r\n raise SyntaxError(\"Not a PEM private key file\")", "def _decrypt(self, data, key):\n seed1 = key\n seed2 = 0xEEEEEEEE\n result = BytesIO()\n\n for i in range(len(data) // 4):\n seed2 += self.encryption_table[0x400 + (seed1 & 0xFF)]\n seed2 &= 0xFFFFFFFF\n value = struct.unpack(\"<I\", data[i*4:i*4+4])[0]\n value = (value ^ (seed1 + seed2)) & 0xFFFFFFFF\n\n seed1 = ((~seed1 << 0x15) + 0x11111111) | (seed1 >> 0x0B)\n seed1 &= 0xFFFFFFFF\n seed2 = value + seed2 + (seed2 << 5) + 3 & 0xFFFFFFFF\n\n result.write(struct.pack(\"<I\", value))\n\n return result.getvalue()", "def _load_key(self, path):\n with open(path, 'r') as f:\n self._key = f.readline().strip()\n self._secret = f.readline().strip()", "def 
test_set_private_key_setter_encrypted_pem(self) -> None:\n\n expected = self.pem_private_key.decode()\n\n encryptor = DataEncryption()\n encryptor.set_private_key(\n self.encrypted_pem_private_key, password=self.private_key_password\n )\n\n # pylint: disable=protected-access\n actual = encryptor._loaded_private_key.private_bytes(\n serialization.Encoding.PEM,\n serialization.PrivateFormat.PKCS8,\n serialization.NoEncryption(),\n ).decode()\n\n self.assertEqual(expected, actual)", "def validate_privatekey_pem(key_pem):\n assert isinstance(key_pem, str)\n\n private_key_cryptography = serialization.load_pem_private_key(\n data=key_pem.encode('ascii'),\n password=None,\n backend=cryptography_default_backend\n )\n\n if not isinstance(private_key_cryptography, rsa.RSAPrivateKey):\n sys.exit('Unexpected private key type')\n\n return private_key_cryptography", "def unwrap(self):\n\n if self.algorithm == 'rsa':\n return self.asn1['private_key'].parsed\n\n if self.algorithm == 'dsa':\n params = self.asn1['private_key_algorithm']['parameters']\n return DSAPrivateKey({\n 'version': 0,\n 'p': params['p'],\n 'q': params['q'],\n 'g': params['g'],\n 'public_key': self.public_key.unwrap(),\n 'private_key': self.asn1['private_key'].parsed,\n })\n\n if self.algorithm == 'ec':\n output = self.asn1['private_key'].parsed\n output['parameters'] = self.asn1['private_key_algorithm']['parameters']\n output['public_key'] = self.public_key.unwrap()\n return output", "def test_private_key_pkey(self):\n priv = \"\"\"-----BEGIN PRIVATE KEY-----\nMIIBVAIBADANBgkqhkiG9w0BAQEFAASCAT4wggE6AgEAAkEAybxDeYLbbriv2wJ2\nd0w09xGJdi7dIzgPtI6beSKkk3ILXRqj59ufj/i7RXg7RASOzZH/wmfvbBNsI5y5\nM62FDwIDAQABAkB/ayvrKd3TV0+rsyiEPVwO2cLLJNqEDjrNPm2w21K71WMVkngm\nOH0DpFePpPHQf+EdUfpRwZNdXhyt52MxC4GxAiEA8FBZd1uqZ1PGrkety7EGgEJk\nBTrtu/WVLbGhbloNvr0CIQDW50RfhAmFJPh6bo4nKE/qtz5O0BVsoFQA8l7uB+eF\nuwIgC57HBLeBAOgTJmA+7ieMOe176qjT0A/q+7+oH67pFT0CIQDInpuAw6WTi2EA\nAsdoHMUGbEyZjL4Da2UggSNH+U8U0wIgR1ZLchEpsHafverbte2qHey/BSHyKEQi\ncCn1I7EnAH8=\n-----END PRIVATE KEY-----\"\"\"\n key = crypto.load_privatekey(PEM, priv)\n self.assertEqual(utils.private_key_type(key), c.KEY_RSA)", "def decrypt(self, encrypted_number):\n relevant_private_key = self.__keyring[encrypted_number.public_key]\n return relevant_private_key.decrypt(encrypted_number)", "def read(self):\n buff = self.conn.recv(4096)\n if (self.algo == \"rsa\"):\n buff = self.rsa_decrypt(buff)\n if (self.algo == \"des\"):\n buff = self.des_decrypt(buff)\n if (self.algo == \"3des\"):\n buff = self.triple_des_decrypt(buff)\n if (self.algo == \"aes\"):\n buff = self.aes_decrypt(buff)\n\n while buff.strip() != self.exitcode and len(buff) > 0:\n print 'Message received: ', buff.strip()\n #buff = self.rsa_decrypt(buff)\n buff = self.conn.recv(4096)\n\n if (self.algo == \"rsa\"):\n buff = self.rsa_decrypt(buff)\n if (self.algo == \"des\"):\n buff = self.des_decrypt(buff)\n if (self.algo == \"3des\"):\n buff = self.triple_des_decrypt(buff)\n if (self.algo == \"aes\"):\n buff = self.aes_decrypt(buff)\n # client disconnected\n self.stopWrite", "def get_verifying_key(private_key):\n return private_key.get_verifying_key().to_pem().decode('ascii')", "def can_decrypt(self, private_key, data_len):\n result = self._lib_vscf_ecc.vscf_ecc_can_decrypt(self.ctx, private_key.c_impl, data_len)\n return result", "def _decrypt(self, msg):\r\n # they must be real crypto experts at pubnub.com\r\n # two lines of code and two capital mistakes :-(\r\n # pylint: disable=E1101\r\n key = 
hashlib.sha256(self.cipher).hexdigest()[0:32]\r\n aes = AES.new(key, AES.MODE_CBC, \"0123456789012345\")\r\n decrypted = aes.decrypt(base64.decodestring(msg))\r\n return json.loads(decrypted[0:-ord(decrypted[-1])])", "def decrypt(self, key, msg, b64decode=True):\n if b64decode:\n msg = base64.b64decode(msg)\n iv = msg[:self.cipher.block_size]\n cipher = self.cipher.new(key, self.cipher.MODE_CBC, iv)\n\n padded = cipher.decrypt(msg[self.cipher.block_size:])\n l = ord(padded[-1:]) + 1\n plain = padded[:-l]\n return plain", "def asym_dec(self, ciph, keyfile):\n ciph = ciph.split('\\0')\n ciphkey_len = int(ciph[0])\n ciph = '\\0'.join(ciph[1:])\n ciphkey = ciph[:ciphkey_len]\n ciph = ciph[ciphkey_len:]\n\n passphrase = xsystem([self.sslname, 'rsautl', '-decrypt', '-inkey',\n keyfile], ciphkey)\n if not passphrase:\n warning('keymanagement: Unable to perform asymmetric decryption\\n')\n return None\n\n return self.sym_dec(ciph, passphrase)", "def decrypt_message(encrypted_message):", "def decrypt(key, ciphertext):\n data = fk(keyGen(key)[1], ip(ciphertext))\n return fp(fk(keyGen(key)[0], swapNibbles(data)))", "def decrypt(self, key, dir):\n self.encrypt(key, dir)", "def decrypt_kms_data(encrypted_data):\n if not AWS_REGION:\n return\n\n kms = boto3.client('kms', region_name=AWS_REGION)\n\n decrypted = kms.decrypt(CiphertextBlob=encrypted_data)\n\n if decrypted.get('KeyId'):\n # Decryption succeed\n decrypted_value = decrypted.get('Plaintext', '')\n if isinstance(decrypted_value, bytes):\n decrypted_value = decrypted_value.decode('utf-8')\n return decrypted_value", "def decrypt_data_key(self, dataKeyCypher, token, userGroup):\n masterKey = self.retrieve_master_key(token=token, userGroup=userGroup)\n box = secret.SecretBox(masterKey)\n if isinstance(dataKeyCypher, str):\n dataKeyCypher = dataKeyCypher.encode('cp855')\n try:\n plainText = box.decrypt(dataKeyCypher).decode('utf-8')\n except Exception:\n raise UnableToDecryptException(\"Unable to verify cyphertext/key pair\")\n return plainText", "def decrypt(self, encBytes):\r\n if not self.hasPrivateKey():\r\n raise AssertionError()\r\n if len(encBytes) != numBytes(self.n):\r\n return None\r\n c = bytesToNumber(encBytes)\r\n if c >= self.n:\r\n return None\r\n m = self._rawPrivateKeyOp(c)\r\n decBytes = numberToByteArray(m, numBytes(self.n))\r\n #Check first two bytes\r\n if decBytes[0] != 0 or decBytes[1] != 2:\r\n return None\r\n #Scan through for zero separator\r\n for x in range(1, len(decBytes)-1):\r\n if decBytes[x]== 0:\r\n break\r\n else:\r\n return None\r\n return decBytes[x+1:] #Return everything after the separator\r" ]
[ "0.6939709", "0.66746044", "0.66666764", "0.65895903", "0.6577401", "0.6512976", "0.6444183", "0.6393576", "0.6384379", "0.6342567", "0.631341", "0.6248368", "0.61970115", "0.6189783", "0.6187942", "0.6154129", "0.6141176", "0.61391276", "0.6136478", "0.6121558", "0.6083108", "0.60737604", "0.6073027", "0.60723424", "0.60694736", "0.60672796", "0.606718", "0.6058194", "0.6057818", "0.59851384", "0.5979333", "0.5964565", "0.5937688", "0.5935424", "0.5925538", "0.59164447", "0.59075105", "0.58856773", "0.5884226", "0.5882952", "0.58624846", "0.5859386", "0.585431", "0.58526033", "0.58490914", "0.58475435", "0.58381295", "0.5836204", "0.58255666", "0.5819845", "0.5813926", "0.58094317", "0.5802788", "0.57979923", "0.5781966", "0.5777475", "0.57649106", "0.57639116", "0.5756856", "0.5755679", "0.5750058", "0.5749336", "0.5743981", "0.57413846", "0.5738796", "0.5735998", "0.5717154", "0.5714001", "0.56963605", "0.56932825", "0.5692846", "0.56809986", "0.56729895", "0.56729895", "0.5671771", "0.5671156", "0.5670522", "0.56705195", "0.56700504", "0.5667654", "0.56668895", "0.56647784", "0.56643", "0.5652934", "0.5649959", "0.5638057", "0.5630504", "0.5604527", "0.56007504", "0.5598946", "0.5592175", "0.55731446", "0.55448246", "0.5518169", "0.55157876", "0.55076045", "0.5507205", "0.5499821", "0.5499705", "0.54893285" ]
0.5849048
45
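
The record above ranks private-key parsing snippets; its gold document lands at rank 45 with score 0.5849048. As a point of reference for the task the record targets, here is a minimal sketch of loading a PEM private key, assuming the `cryptography` package that several of the negatives themselves use (the path, password, and helper name are placeholders, not part of the dataset):

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.serialization import load_pem_private_key

    def load_pem_key(path, password=None):
        """Load a PEM-encoded private key; password must be bytes or None."""
        with open(path, "rb") as f:
            data = f.read()
        # Raises ValueError on malformed or undecryptable data, and TypeError
        # when a password is supplied for an unencrypted key (or omitted for
        # an encrypted one). The backend argument is optional in
        # cryptography >= 3.1 and kept here to match the snippets above.
        return load_pem_private_key(data, password=password,
                                    backend=default_backend())

    # key = load_pem_key("server.pem", password=b"s3cret")  # placeholder values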
Read password from potentially gpg-encrypted file.
def load_password(fn): if not fn: return None data = load_gpg_file(fn) data = data.strip(b'\n') return data
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_key():\n return open(\"pass.key\", \"rb\").read()", "def _PasswordFromFile(self, hostname, port, username):\n\n\t\tif username == None or len(username) == 0:\n\t\t\treturn None\n\n\t\tif hostname == None or hostname == UNIX_DOMAIN_PATH:\n\t\t\thostname = DefaultHost\n\n\t\tpgpassfile = self._getPoolPassFilename()\n\t\tif not os.path.exists(pgpassfile):\n\t\t\tif self.Pfdebug:\n\t\t\t\tself.Pfdebug.write(f'WARNING: password file \"{pgpassfile}\" does not exist\\n')\n\t\t\treturn None\n\n\t\t# If password file cannot be opened, ignore it.\n\t\tstat_buf = None\n\t\ttry:\n\t\t\tstat_buf = os.stat(pgpassfile)\n\t\texcept Exception:\n\t\t\treturn None\n\n\t\tst_mode = stat_buf.st_mode\n\t\tif not stat.S_ISREG(st_mode):\n\t\t\tif self.Pfdebug:\n\t\t\t\tself.Pfdebug.write(f'WARNING: password file \"{pgpassfile}\" is not a plain file\\n')\n\t\t\treturn None\n\t\t\n\t\t# If password file is insecure, alert the user and ignore it.\n\t\tif stat.S_IRWXG & st_mode or stat.S_IRWXO & st_mode:\n\t\t\tif self.Pfdebug:\n\t\t\t\tself.Pfdebug.write(f'WARNING: password file \"{pgpassfile}\" has group or world access; permissions should be u=rw (0600) or less\\n')\n\t\t\treturn None\n\t\t\n\t\tfp = open(pgpassfile, 'r')\n\t\tif fp == None:\n\t\t\treturn None\n\n\t\tfor line in fp:\n\t\t\t#Remove trailing newline\n\t\t\tline = line.strip('\\n')\n\t\t\tif 0 == len(line):\n\t\t\t\tcontinue\n\t\t\tparts = re.split('[^\\\\\\\\]:', line)\n\t\t\tif len(parts) < 4:\n\t\t\t\tprint('Warning: Invalid pgpass entry')\n\t\t\t\tcontinue\n\t\t\tif parts[0] == hostname and parts[1] == port and parts[2] == username:\n\t\t\t\t# Deescape password\n\t\t\t\tlast_part = parts[3].replace('\\\\','')\n\t\t\t\tfp.close()\n\t\t\t\treturn last_part\n\t\tfp.close()\n\t\treturn None", "def read_key():\n path = os.path.join(os.path.dirname(__file__), 'data')\n f = open(os.path.join(path, 'credential.txt'), 'r')\n key = f.read()\n f.close()\n return key", "def from_file(self, file):\n must_close = False\n if isinstance(file, str):\n try:\n file = open(file, \"rb\")\n except (FileNotFoundError, PermissionError) as e:\n raise GPG.DecryptionException(str(e))\n else:\n must_close = True\n result = subprocess.run(\n [GPG.bin, \"--decrypt\"],\n input=file.read(),\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE\n )\n if must_close:\n file.close()\n if result.returncode == 0:\n data = result.stdout\n return data\n else:\n raise GPG.DecryptionException(result.stderr)", "def load_key():\n return open(\"Secret.key\",\"rb\").read()", "def read_passwords_file(password_file):\n return [(line[0], line[1].strip('\\n')) for line in\n map(lambda x: x.split('='), open(password_file, 'r'))]", "def load_login_file(fpath):\n with open(fpath) as f:\n name = f.readline().rstrip('\\n')\n passwd = f.readline().rstrip('\\n')\n return name, passwd", "def load_key():\n return open(\"secret.key\", \"rb\").read()", "def decrypt(self, path):\n with open(path, \"rb\") as fileh:\n gpg = qpgpg.GPG()\n try:\n decrypted = gpg.decrypt_file(fileh)\n except qpgpg.GPG.DecryptionException:\n raise\n else:\n return decrypted", "def part4b(filename, password=None):\n file_object = open(filename, \"r\")\n encodedUser = file_object.readline()\n encodedPass = file_object.readline()\n message_bytes = base64.b64decode(encodedUser)\n decryptedUser = message_bytes.decode(\"utf-8\")\n message_bytes = base64.b64decode(encodedPass)\n decryptedPass = message_bytes.decode(\"utf-8\")\n print(decryptedUser)\n print(decryptedPass)\n file_object.close()\n if(password != None):\n 
part4a(filename,decryptedUser,password)", "def read(self):\n with open(expanduser(self.vault_file), \"rb\") as vault_file:\n encrypted = vault_file.read()\n\n vault_lib = VaultLib(self.secrets.items())\n plaintext = vault_lib.decrypt(encrypted, filename=self.vault_file)\n return load(plaintext, Loader=SafeLoader)", "def passwordFileToDict(filename):\n passwords = {}\n\n file = open(filename, 'r')\n for line in file:\n\n if line and line.count(':'):\n\n username, password = line.strip().split(':')\n\n passwords[bytes(username,\"utf-8\")] = bytes(password,\"utf-8\")\n\n return passwords", "def read_enc_settings():\n print(\"Decrypting {}\".format(ENC_SETTINGS))\n try:\n output = subprocess.check_output(['gpg', '-d', ENC_SETTINGS])\n except subprocess.SubprocessError:\n print(\"Decryption failed, ignoring\")\n return\n config = ConfigParser()\n config.read_string(output.decode('utf8', errors='ignore'))\n return config", "def part4b(filename, password=None):\n f= open(filename,\"r\")\n contents = f.readlines()\n for i in contents:\n decodedVersion = base64.b64decode(i)\n print(decodedVersion)", "def getSecret(self):\n\n with open(self._secret_file) as f:\n secret=f.readline().rstrip()\n \n return secret", "def read_in_xforce_keys(file):\n key = file.readline().strip()\n password = file.readline().strip()\n if validate_api_creds(key) and validate_api_creds(password):\n return key, password\n else:\n print(\"API credentials invalid. Please check your key and password. Exiting...\")\n sys.exit(1)", "def read(path, password=None):\n if password is None: # unencrypted\n with open(path, 'r') as config_file:\n return json.loads(config_file.read())\n else:\n raise NotImplementedError(\"encryption not implemented\")", "def pw_file(self):\r\n return self._pw_file", "def get_credentials(path='~/.pgpass', db=DB):\n\n # Load credentials from path\n with open(os.path.expanduser(path), 'r') as file:\n host, port, _, user, password = file.read().strip().split(':')\n \n return host, port, user, password, db", "def get_password(USERNAME):\n\n FILENAME = '/run/secrets/{}'.format(USERNAME)\n if os.path.isfile(FILENAME):\n with open(FILENAME) as f:\n DB_PASSWORD = f.read().splitlines()[0]\n else:\n DB_PASSWORD = 'postgres'\n return DB_PASSWORD", "def passphrase_file(passphrase=None):\n cmd = []\n pass_file = None\n if not passphrase and 'CRYPTORITO_PASSPHRASE_FILE' in os.environ:\n pass_file = os.environ['CRYPTORITO_PASSPHRASE_FILE']\n if not os.path.isfile(pass_file):\n raise CryptoritoError('CRYPTORITO_PASSPHRASE_FILE is invalid')\n elif passphrase:\n tmpdir = ensure_tmpdir()\n pass_file = \"%s/p_pass\" % tmpdir\n p_handle = open(pass_file, 'w')\n p_handle.write(passphrase)\n p_handle.close()\n\n if pass_file:\n cmd = cmd + [\"--batch\", \"--passphrase-file\", pass_file]\n\n vsn = gpg_version()\n if vsn[0] >= 2 and vsn[1] >= 1:\n cmd = cmd + [\"--pinentry-mode\", \"loopback\"]\n\n return cmd", "def get_credentials_from_file(credentials_file):\n # Change the scope username and password variables to global\n global username\n global password\n try:\n # Open and reads the credentials.pwd file and save the lines in the username and password\n with open(os.path.dirname(__file__) + credentials_file) as credential_file:\n credentials = credential_file.readlines()\n username = credentials[0].strip()\n password = credentials[1].strip()\n\n credential_file.close()\n except FileNotFoundError as error:\n print(error)\n sys.exit(1)", "def load_cryptopals(file_path):\r\n records = []\r\n with open(file_path,'rb') as 
cpals:\r\n x = cpals.readline();\r\n while x is not None and x != b'':\r\n if (len(x)%2!=0): x = x[:-1]; # strip the last character if the line is uneven.\r\n records.append(to_hex(x));\r\n print(x)\r\n x = cpals.readline();\r\n return records; # gets the hashed records. now to pair them with their most likely candidates (brute force if necessary.)\r", "def decrypt_data(self, master_pass, website, filename): \n\n if os.path.isfile(filename):\n try:\n with open(filename, 'r') as jdata:\n jfile = json.load(jdata)\n nonce = bytes.fromhex(jfile[website][\"nonce\"])\n password = bytes.fromhex(jfile[website][\"password\"])\n except KeyError:\n raise PasswordNotFound\n else:\n raise PasswordFileDoesNotExist\n # add extra characters and take first 16 to make sure key is right.\n formatted_master_pass = master_pass + \"================\"\n master_pass_encoded = formatted_master_pass[:16].encode(\"utf-8\")\n cipher = AES.new(master_pass_encoded, AES.MODE_EAX, nonce = nonce)\n plaintext_password = cipher.decrypt(password).decode(\"utf-8\")\n\n return plaintext_password", "def get_pass(self, item):\n text = str(self.get_contents(item), encoding=\"utf-8\")\n lines = text.split(\"\\n\")\n password = lines[0]\n return password", "def read_password_db(fname='input.txt'):\n with open(fname) as fp:\n matches = [regex.match(line) for line in fp.readlines()]\n \n passwords = [{\n 'num_1': int(m.group(1)),\n 'num_2': int(m.group(2)),\n 'character': m.group(3),\n 'password': m.group(4)\n } for m in matches]\n \n return passwords", "def _load_psk(self, psk_file):\n with open(psk_file, 'rb') as f:\n return f.read().rstrip()", "def testPassword(cryptPass, dictionaryFile):\n #salt = cryptPass[0:2]\n salt = crypt.mksalt(crypt.METHOD_SHA512) # Updated for SHA512 encrypted passwords\n dictFile = open(dictionaryFile, 'r')\n for word in dictFile.readlines():\n word = word.strip('\\n')\n cryptWord = crypt.crypt(word, salt)\n \n if cryptWord == cryptPass:\n print('[+] Found Password: ' + word + '\\n')\n return\n print('[-] Password Not Found.\\n')\n return", "def recover_encrypt_pass(self):\n with open(self.key_path) as input_file:\n key = input_file.readlines()\n cipher_suite = Fernet(key[0])\n bin_passwd = bytes(self.password, 'utf-8')\n ciphered_text = cipher_suite.encrypt(bin_passwd)\n return ciphered_text", "def get_token_from_secret_file(secret_file_path):\n try:\n with open(secret_file_path, \"r\") as f:\n return f.readline()\n except FileNotFoundError:\n raise BaseSpaceDownloadError(\"Secret file not found\")\n except PermissionError:\n raise BaseSpaceDownloadError(\"No permissions to read secret file\")", "def check_gpg_password(self, gpg_fingerprint):\n cmd = [\n \"gpg\",\n \"--batch\",\n \"--pinentry-mode\",\n \"loopback\",\n f\"--passphrase-file={env.GPG_PASS_FILE}\",\n \"--dry-run\",\n \"--passwd\",\n gpg_fingerprint,\n ]\n _p, output, error_msg = popen_communicate(cmd) # type: ignore\n if _p.returncode or error_msg:\n raise Exception(error_msg)\n # raise BashCommandsException(p.returncode, output, error_msg, str(cmd))\n\n # try:\n # run(cmd)\n # except Exception as e:\n # print_tb(e)\n # raise e", "def breakPassword(filename):\n encryptedFile = open(filename, 'rb')\n pdfReader = PyPDF2.PdfFileReader(encryptedFile)\n\n with open('dictionary.txt') as words:\n wordList = words.read().split('\\n')\n\n for word in wordList:\n wordLower = word.lower()\n wordCap = word.capitalize()\n\n if pdfReader.decrypt(word):\n return word\n elif pdfReader.decrypt(wordCap):\n return wordCap\n elif 
pdfReader.decrypt(wordLower):\n return wordLower\n\n return", "def testRead(self):\n path_spec = path_spec_factory.Factory.NewPathSpec(\n definitions.TYPE_INDICATOR_TSK, location='/passwords.txt',\n inode=self._IDENTIFIER_PASSWORDS_TXT, parent=self._bde_path_spec)\n file_object = tsk_file_io.TSKFile(self._resolver_context, path_spec)\n\n self._TestRead(file_object)", "def testRead(self):\n path_spec = path_spec_factory.Factory.NewPathSpec(\n definitions.TYPE_INDICATOR_TSK, location='/passwords.txt',\n inode=self._IDENTIFIER_PASSWORDS_TXT, parent=self._bde_path_spec)\n file_object = tsk_file_io.TSKFile(self._resolver_context, path_spec)\n\n self._TestRead(file_object)", "def read(self):\n found = False\n if os.path.exists(self.user_file):\n if os.path.getsize(self.user_file) > 0:\n f = open(self.user_file, \"rb\")\n data = f.read()\n self.screen_name, self.access_key, self.access_secret = data.split() # split the line by space token\n f.close()\n found = True\n return found", "def load_key(fn, psw=None):\n if not fn:\n die(\"Need private key\")\n if psw:\n psw = as_bytes(psw)\n data = load_gpg_file(fn)\n key = load_pem_private_key(data, password=psw, backend=get_backend())\n return key", "def load_credential_file(self, path):\r\n c_data = StringIO.StringIO()\r\n c_data.write(\"[Credentials]\\n\")\r\n for line in open(path, \"r\").readlines():\r\n c_data.write(line.replace(\"AWSAccessKeyId\", \"aws_access_key_id\").replace(\"AWSSecretKey\", \"aws_secret_access_key\"))\r\n c_data.seek(0)\r\n self.readfp(c_data)", "def import_credentials(password, cred_file):\n\t\tself.exchanges = decrypt(password, cred_file)", "def get_correct_pw_md5():\n f = open(PASSWORD_FILE, 'r')\n pw_md5 = f.read().strip()\n f.close()\n return pw_md5", "def read_public_key(f: IO[str]) -> Tuple[str, str, str, str]:\n data = f.read()\n try:\n kind, key, comment = data.split(\" \")\n if kind.startswith(\"ssh-\") and comment:\n base64.b64decode(key)\n return (kind, key, comment, data)\n except ValueError:\n pass\n\n raise click.ClickException(\"{} is not a valid SSH key\".format(f.name))", "def load_credentials(path: str = 'credentials.txt', user_index: int = 0):\n assert os.path.exists(path), f\"Specificy login credentials in {path}\"\n with open(path) as cfile:\n lines = cfile.readlines()\n assert len(lines) >= 2, f\"Must have line for username and password in {path}\"\n # return username (first line) and password (second line)\n return lines[2 * user_index].strip(), lines[(2 * user_index) + 1].strip()", "def passwd_decryption(self):\n with open(self.key_path, 'rb') as input_key:\n for line in input_key:\n key = line\n with open(self.pass_path, 'rb') as input_password:\n for line in input_password:\n password = line\n cipher_suit = Fernet(key)\n plain_password = cipher_suit.decrypt(password)\n plain_password = bytes(plain_password).decode('utf-8')\n \n return plain_password", "def get_passwd(self):\n if self.__password:\n aes_cipher = AESCipher()\n return aes_cipher.decrypt(self.__password, self.__aes_key)", "def __getitem__(self, key):\n filename = os.path.join(self.pwfiles[key], key) + '.pw'\n with open(filename, 'rb') as f:\n buf = f.read()\n if buf[:4] == \"PAW2\":\n inp = PAW2_Buffer()\n elif buf[:4] == \"PAWD\":\n inp = PAWD_Buffer()\n else:\n raise StorageError(\"'%s' is not a PasswordFile.\" % filename)\n inp.unpack(buf)\n if inp.key != key:\n raise StorageError(\"File doesn't match the key '%s'.\" % inp.key)\n return inp", "def decode_encrypted_wallet(password: str, path=\"wallet.dat\"):\n db = DB()\n 
db.open(path, \"main\", DB_BTREE, DB_THREAD | DB_RDONLY)\n data = defaultdict(list)\n\n # iterate database\n for k, v in db.items():\n key_name = k[1:1+k[0]].decode()\n if key_name == 'ckey':\n # encryptedKey: [(py, encrypted_sk), ]\n data[key_name].append((k[6:6+33], v[1:1+96]))\n elif key_name == 'mkey':\n # masterKey: encrypted_key, salt, DerivationIterations\n data[key_name] = [v[1:1+48], v[50:50+8], int.from_bytes(v[4+58:4+58+8], 'little')]\n elif key_name == 'key':\n # normalKey:\n raise Exception('this wallet is not encrypted!')\n db.close()\n\n # decrypt\n cp = Pycrypto.set_key_from_passphrase(\n password.encode(), data['mkey'][1], data['mkey'][2])\n mk = cp.decrypt(data['mkey'][0]) # import masterKey as key\n cp.set_key(mk)\n for pk, encrypted_sk in data['ckey']:\n cp.set_iv(double_hash(pk)) # import doubleHashed pk as IV\n sk = cp.decrypt(encrypted_sk)\n if sk2pk(sk) != pk:\n raise Exception('wrong password! {} {}'.format(sk2pk(sk).hex(), pk.hex()))\n ck = hashlib.new('ripemd160', hashlib.sha256(pk).digest()).digest()\n yield sk, pk, ck", "def fingerprint_from_file(filename):\n cmd = flatten([gnupg_bin(), gnupg_home(), filename])\n outp = stderr_output(cmd).split('\\n')\n if not outp[0].startswith('pub'):\n raise CryptoritoError('probably an invalid gpg key')\n\n return outp[1].strip()", "def get_password(username, interactive=sys.stdout.isatty()):\n try:\n return get_password_from_keyring(username)\n except PyiCloudNoStoredPasswordAvailableException:\n if not interactive:\n raise\n\n return getpass.getpass(\n \"Enter iCloud password for {username}: \".format(username=username,)\n )", "def _load_key(self, path):\n with open(path, 'r') as f:\n self._key = f.readline().strip()\n self._secret = f.readline().strip()", "def get_password_from_keyring(username):\n result = keyring.get_password(KEYRING_SYSTEM, username)\n if result is None:\n raise PyiCloudNoStoredPasswordAvailableException(\n \"No pyicloud password for {username} could be found \"\n \"in the system keychain. 
Use the `--store-in-keyring` \"\n \"command-line option for storing a password for this \"\n \"username.\".format(username=username,)\n )\n\n return result", "def _load_password():\n password = session.get('password')\n\n if password is None:\n g.password = None\n else:\n g.password = password", "def decrypt(data):\n gpg = gnupg.GPG()\n gpgChunks = [str for str in data.split(\"\\n\\n\")[:-1]]\n decrypted = \"\"\n for chunk in gpgChunks:\n\tchunk = \"-----BEGIN PGP MESSAGE-----\\n\\n\" + chunk + \"\\n-----END PGP MESSAGE-----\\n\"\n\twith open('/tmp/temp.gpg', 'w+') as f:\n\t f.write(chunk)\n\twith open(os.devnull, 'w') as dn:\n\t # dirty hack but the GPG library is horrible\n\t decrypted += check_output('gpg --decrypt /tmp/temp.gpg', shell=True, stderr=dn)[:-1]\n return decrypted", "def read_keys(path):\n with open(path) as walletfile:\n b_keys = walletfile.read()\n p_keys = base64.b64decode(b_keys)\n return pickle.loads(p_keys)", "def get_raw_secret_from_file(\n secret: PotentialSecret,\n line_getter_factory: Callable[[str], 'LineGetter'] = open_file,\n) -> Optional[str]:\n if not secret.line_number:\n raise NoLineNumberError\n\n for item in get_raw_secrets_from_file(secret, line_getter_factory):\n return item.secret_value\n\n raise SecretNotFoundOnSpecifiedLineError(secret.line_number)", "def get_secret():\n if not DEFAULT_KEY_FILE.exists():\n raise Exception(\"Authentication key must be stored in a file named \" + DEFAULT_KEY_FILE.name)\n\n retval = DEFAULT_KEY_FILE.read_text().strip()\n if not retval or len(retval) < 10:\n raise Exception(\"Invalid authentication token\")\n return retval", "def _decrypt_pvtkey(self, pvtkey_file: str, passphrase: str) -> str:\n\n keydata: str = None\n if pvtkey_file:\n try:\n keydata = asyncssh.public_key.read_private_key(pvtkey_file,\n passphrase)\n except Exception as e:\n self.logger.error(\n f\"ERROR: Unable to read private key file {pvtkey_file}\"\n f\"for jump host due to {str(e)}\")\n\n return keydata", "def find_pass_cfg(file_path=None):\n if not file_path:\n file_path = '~/.pass.cfg'\n\n if os.path.isfile(os.path.expanduser(file_path)):\n return file_path\n else:\n return None", "def load_credentials(pwd: str):\n\n try:\n with open(\"credentials.bin\", 'rb') as creds:\n user, client_id, secret, (x, y), checksum = creds.read().split(b'\\0')\n except ValueError as e:\n raise ValueError(\"Corrupted credentials store.\") from e\n\n pwd = pwd.encode()\n x = reduce(xor, pwd, x)\n pwd = xor_crypt(pwd, x)\n y = reduce(xor, pwd, x^y)\n secret = xor_crypt(secret, pwd)\n secret = xor_crypt(secret, y)\n user = xor_crypt(user, y)\n client_id = xor_crypt(client_id, y)\n\n checksum = xor_crypt(checksum, y)\n if xor_crypt(secret[7::-1]+secret[-1:-9:-1], xor_crypt(pwd, x)) == checksum:\n return [b.decode() for b in (user, client_id, secret)]", "def get_pem():\n try:\n with open('encrypted_pem.txt', 'r') as encrypted_pem:\n pem_file = encrypted_pem.read()\n\n kms = boto3.client('kms', region_name=REGION)\n return kms.decrypt(CiphertextBlob=b64decode(pem_file))['Plaintext']\n except (IOError, ClientError, KeyError) as err:\n LOGGER.error(err)\n return False", "def read_ciphertext(filepath):\n with open(filepath, 'rb') as cipher_file:\n return cipher_file.read()", "def read(self):\n if self.cleartext is None:\n if os.path.exists(self.encrypted_filename):\n self.cleartext = self._decrypt()\n else:\n self.cleartext = ''\n return self.cleartext", "def _get_bytes_from_pem_file(fpath: str) -> bytes:\n with open(fpath, \"rb\") as f:\n return f.read()", "def 
main(dictionaryFile, passwordFile):\n passFile = open(passwordFile)\n for line in passFile.readlines():\n if ':' in line:\n user = line.split(':')[0]\n cryptPass = line.split(':')[1].strip(' ')\n print('[*] Cracking Password For: ' + user)\n testPassword(cryptPass, dictionaryFile)", "def read_cipher_alphabet(filename: str) -> str:\n file_openend = open(filename)\n\n return file_openend.read()", "def parse_secrets_file(self, path_to_file) -> dict:\n config = self.import_secrets_file(path_to_file)\n\n self.traverse_and_decrypt(config)\n\n return config", "def decrypt(self,password,indata):\n key = hashlib.sha256(password).digest()\n return decrypt_file(key,indata)", "def get_credentials(key):\n with open(\"credentials.json\", \"r\") as credentials_file:\n credentials_data = json.load(credentials_file)\n\n try:\n return credentials_data[key]\n except KeyError:\n raise KeyError(f\"Credential {key} was not found in file.\")", "def define_login_password():\n file_path = 'login.txt'\n logger.debug('Loading login information')\n login_info = {}\n f = open(file_path, 'rb')\n for l in f.readlines():\n if 'login' in l:\n login_info['login'] = l.split('=')[1].strip()\n else:\n login_info['pwd'] = l.split('=')[1].strip()\n return login_info", "def read_key(self, keyfile_name):\n\n with open(keyfile_name, 'rb') as f:\n self.key = f.read()\n self.cryptor = Fernet(self.key)", "def decrypt_reader(in_path, password, key_length=32):\n with open(in_path, 'rb') as in_file:\n password = str.encode(password)\n block_size = AES.block_size\n salt = in_file.read(block_size)#[len(b'Salted__'):]\n key, i_v = derive_key_and_iv(password, salt, key_length, block_size)\n cipher = AES.new(key, AES.MODE_CBC, i_v)\n next_chunk = b''\n finished = False\n csv_bytes = b''\n while not finished:\n chunk = next_chunk\n next_chunk = cipher.decrypt(in_file.read(1024 * block_size))\n if len(next_chunk) == 0:\n padding_length = chunk[-1]\n if padding_length < 1 or padding_length > block_size:\n raise ValueError(\"Password incorrect\")\n chunk = chunk[:-padding_length]\n finished = True\n csv_bytes += chunk\n newline_pos = csv_bytes.find(b\"\\r\\n\")\n while newline_pos != -1:\n try:\n yield bytes_to_csv(csv_bytes[:(newline_pos + 2)])[0]\n except:\n raise ValueError(\"Password incorrect\")\n csv_bytes = csv_bytes[(newline_pos + 2):]\n newline_pos = csv_bytes.find(b\"\\r\\n\")", "def get_private_key():\n if not os.path.exists(_private_key_path):\n return None\n\n try:\n with open(_private_key_path) as secret_file:\n return secret_file.read()\n\n except Exception as exc:\n log.error(f'Could not read private key.\\n{exc}')\n traceback.print_exc(file=sys.stderr)", "def dencrypt(command, pw, data):\n if '\\n' in pw:\n raise Exception('Newlines not allowed in passwords')\n proc = subprocess.Popen(\n command,\n stdin=subprocess.PIPE,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE\n )\n proc.stdin.write(force_bytes(pw))\n proc.stdin.write(b'\\n')\n proc.stdin.write(data)\n output, erroroutput = proc.communicate()\n if proc.returncode != 0:\n raise gpg_exception_factory(proc.returncode, erroroutput)\n return output", "def import_key(self, filename):\n fields = self.input_file(filename)\n\n if (\"Description\" not in fields or \"Method\" not in fields or\n \"Key length\" not in fields or \n \"Secret key\" not in fields or\n fields[\"Method\"] != \"AES\"):\n raise Exception(\"Error reading AES key file.\")\n # print (fields)\n key = fields['Secret key']\n key = binascii.unhexlify(key)\n key_len = int(fields[\"Key length\"], 16)\n if len(key) 
!= key_len:\n raise Exception(\"AES key file contains false information.\")\n \n return key", "def passphrase(self):\n password = self.entry.password\n if password:\n return self.entry.password.encode('UTF-8')\n else:\n return None", "def load_key(self):\n\t return open(\"key.key\", \"rb\").read()", "def __input_encrypted(self, filename):\n fields = self.input_file(filename)\n\n if (\"Description\" not in fields or \"Method\" not in fields or\n \"Data\" not in fields or \"IV\" not in fields or\n fields[\"Method\"] != \"AES\"):\n raise Exception(\"AES crypted file not formated correctly.\")\n\n data = fields[\"Data\"]\n iv = fields[\"IV\"]\n return binascii.unhexlify(iv) + base64.b64decode(data)", "def _find_password(fp: Iterable[str], url: str, username: str) -> Optional[str]:\n parser = configparser.ConfigParser()\n parser.read_file(fp)\n sections = (dict(parser.items(name)) for name in parser.sections())\n return next(\n (\n s[_key_password]\n for s in sections\n if all(k in s for k in _section_keys)\n and s[_key_repo].startswith(url)\n and s[_key_username] == username\n ),\n None,\n )", "async def read_secret(self, name: str):\n pass", "def grab_or_generate_secret_key(secret_file_path):\n try:\n secret_key = open(secret_file_path).read().strip()\n except IOError:\n try:\n from random import SystemRandom\n valid_chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'\n secret_key_as_list = [SystemRandom().choice(valid_chars) for i in range(50)]\n secret_key = ''.join(secret_key_as_list)\n secret = file(secret_file_path, 'w')\n secret.write(secret_key)\n secret.close()\n except IOError:\n Exception('Please create a %s file with random characters \\\n to generate your secret key!' % secret_file_path)\n\n return secret_key", "def grr_pwd(line: Text) -> Text:\n del line # Unused.\n return magics_impl.grr_pwd_impl()", "def get_user_password(sockfile):\n return (\"root\", \"fnxm\")", "def decrypt_using_gpg(self, gpg_file, extract_target=None):\n if not os.path.isfile(f\"{gpg_file}.gpg\"):\n os.symlink(gpg_file, f\"{gpg_file}.gpg\")\n\n gpg_file_link = f\"{gpg_file}.gpg\"\n tar_fn = f\"{gpg_file}.tar.gz\"\n try:\n cmd = [\n \"gpg\",\n \"--verbose\",\n \"--batch\",\n \"--yes\",\n f\"--output={tar_fn}\",\n \"--pinentry-mode\",\n \"loopback\",\n f\"--passphrase-file={env.GPG_PASS_FILE}\",\n \"--decrypt\",\n gpg_file_link,\n ]\n run(cmd, suppress_stderr=True)\n log(f\"#> GPG decrypt {ok()}\")\n _remove(gpg_file)\n os.unlink(gpg_file_link)\n except Exception as e:\n print_tb(e)\n raise e\n # finally:\n # os.unlink(gpg_file_link)\n\n if extract_target:\n try:\n untar(tar_fn, extract_target)\n except Exception as e:\n raise Exception(\"Could not extract the given tar file\") from e\n finally:\n cmd = None\n _remove(f\"{extract_target}/.git\")\n _remove(tar_fn)", "def Decrypt(self, data):\n\n data = base64.b64decode(data)\n es = AES.new(self.creds.aesKey, AES.MODE_CBC, self.creds.aesIV)\n solved = \"\"\n try:\n solved = es.decrypt(data)\n except ValueError:\n stdout.write(\"Error, corrupted file.\\n\\n\")\n return \"%errorpass:1234123412341234%\"\n\n return solved", "def _updatePgPassFile():\n try:\n #backup existing .pgpass\n if os.path.exists(basedefs.DB_PASS_FILE):\n backupFile = \"%s.%s\" % (basedefs.DB_PASS_FILE, utils.getCurrentDateTime())\n logging.debug(\"found existing pgpass file, backing current to %s\" % (backupFile))\n os.rename(basedefs.DB_PASS_FILE, backupFile)\n\n with open(basedefs.DB_PASS_FILE, \"w\") as pgPassFile:\n\n # Add header and opening lines\n 
pgPassFile.write(basedefs.PGPASS_FILE_HEADER_LINE + \"\\n\")\n pgPassFile.write(basedefs.PGPASS_FILE_OPENING_LINE + \"\\n\")\n\n # Create credentials lines\n adminLine = \"# %s.\" % basedefs.PGPASS_FILE_ADMIN_LINE\n userLine = \"# %s.\" % basedefs.PGPASS_FILE_USER_LINE\n\n pgPassFile.write(adminLine + \"\\n\")\n\n # Use parameters received from the user and skip if the install is local\n if \"DB_ADMIN\" in controller.CONF.keys():\n logging.info(\"Using db credentials provided by the user\")\n\n # Create user lines\n pgPassFile.write(userLine + \"\\n\")\n pglines = _updatePgPassLine(controller.CONF[\"DB_HOST\"], controller.CONF[\"DB_PORT\"],\"*\",\n controller.CONF[\"DB_ADMIN\"], controller.CONF[\"DB_PASS\"])\n else:\n logging.info(\"Using default db credentials\")\n\n # Create an admin user line\n pglines = _updatePgPassLine(controller.CONF[\"DB_HOST\"], basedefs.DB_PORT, \"*\", basedefs.DB_ADMIN, controller.CONF[\"DB_PASS\"])\n\n # Add users\n pglines = pglines + \"\\n\" + userLine + \"\\n\"\n pglines = pglines + _updatePgPassLine(controller.CONF[\"DB_HOST\"], basedefs.DB_PORT, \"*\", basedefs.DB_USER, controller.CONF[\"DB_PASS\"])\n\n pgPassFile.write(pglines + \"\\n\")\n pgPassFile.write(basedefs.PGPASS_FILE_CLOSING_LINE + \"\\n\")\n\n #make sure the file has still 0600 mod\n os.chmod(basedefs.DB_PASS_FILE, 0600)\n\n except:\n logging.error(traceback.format_exc())\n raise Exception(output_messages.ERR_UPD_DB_PASS)", "def getString(self, cred_file=None):\n\n cred_data = ''\n if not cred_file:\n # If not file specified, assume the file used to generate Id\n cred_file = self.getIdFilename()\n try:\n data_fd = open(cred_file)\n cred_data = data_fd.read()\n data_fd.close()\n except:\n # This credential should not be advertised\n self.advertize = False\n logSupport.log.exception(\"Failed to read credential %s: \" % cred_file)\n return cred_data", "def Load(self, filename):\n logging.info(\"Reading users file at %s\", filename)\n try:\n try:\n contents = utils.ReadFile(filename)\n except EnvironmentError as err:\n self._users = None\n if err.errno == errno.ENOENT:\n logging.warning(\"No users file at %s\", filename)\n else:\n logging.warning(\"Error while reading %s: %s\", filename, err)\n return False\n\n users = http.auth.ParsePasswordFile(contents)\n\n except Exception as err: # pylint: disable=W0703\n # We don't care about the type of exception\n logging.error(\"Error while parsing %s: %s\", filename, err)\n return False\n\n self._users = users\n\n return True", "def get_secret_key():\n try:\n with open(os.path.join(directory, \"SECRET_KEY\")) as f:\n secret_key = f.readlines()[0].strip()\n if len(secret_key) < 16:\n raise ValueError\n return secret_key\n except Exception as excep:\n raise ConfigurationError(\n \"Please create a SECRET_KEY file in {} with a random string \"\n \"of at least 16 characters\".format(directory)\n ) from excep", "def redis_pwd():\n with open(\"/etc/redis/redis.conf\") as fd:\n secret_cfg = fd.read().splitlines()\n\n for line in secret_cfg:\n line = line.strip()\n if line.startswith(\"requirepass\"):\n return line.split(\" \")[1].strip()\n return ''", "def load_passwords(file: str, sep: str):\n\tpasswords = []\n\tfor cred in open(file, \"r\"):\n\t\tcred = cred.strip(\"\\n\")\n\t\t# Check to make sure sep exists so it doesn't split at the nothingth char\n\t\tif sep != '':\n\t\t\tcred = cred.split(sep)\n\t\t\tpasswords.append(cred[1])\n\t\telse:\n\t\t\tpasswords.append(cred)\n\treturn passwords", "def get_password(self, service, username):\n init_part = 
self._keyring.get_password(service, username)\n if init_part:\n parts = [init_part]\n i = 1\n while True:\n next_part = self._keyring.get_password(\n service, '%s{{part_%d}}' % (username, i)\n )\n if next_part:\n parts.append(next_part)\n i += 1\n else:\n break\n return ''.join(parts)\n return None", "def _get_password(self):\r\n return self._password", "def load_private_key(file_path: str, password: bytes = None,\n encoding: Encoding = None) -> PrivateKey:\n real_encoding = encoding or _get_encoding_type(file_path)\n\n def solve(key_data: bytes) -> PrivateKey:\n \"\"\"Determine the type of data and perform loading based on data type.\n\n :param key_data: given private keys data\n :return: loaded private key\n \"\"\"\n return { # type: ignore\n Encoding.PEM: load_pem_private_key,\n Encoding.DER: load_der_private_key\n }[real_encoding](key_data, password, default_backend())\n\n return generic_load(file_path, solve)", "def read_private_key_file(pkey_file,\n pkey_password=None,\n key_type=None,\n logger=None):\n ssh_pkey = None\n key_types = (paramiko.RSAKey, paramiko.DSSKey, paramiko.ECDSAKey)\n if hasattr(paramiko, 'Ed25519Key'):\n # NOQA: new in paramiko>=2.2: http://docs.paramiko.org/en/stable/api/keys.html#module-paramiko.ed25519key\n key_types += (paramiko.Ed25519Key, )\n for pkey_class in (key_type,) if key_type else key_types:\n try:\n ssh_pkey = pkey_class.from_private_key_file(\n pkey_file,\n password=pkey_password\n )\n if logger:\n logger.debug('Private key file ({0}, {1}) successfully '\n 'loaded'.format(pkey_file, pkey_class))\n break\n except paramiko.PasswordRequiredException:\n if logger:\n logger.error('Password is required for key {0}'\n .format(pkey_file))\n break\n except paramiko.SSHException:\n if logger:\n logger.debug('Private key file ({0}) could not be loaded '\n 'as type {1} or bad password'\n .format(pkey_file, pkey_class))\n return ssh_pkey", "def password(self, repository):\r\n return self._password(repository)", "def load_Fernet_key(filename):\n\tfich = open(str(filename) +'.key', 'rb')\n\tkey = fich.read() # The key will be type bytes\n\tfich.close()\n\treturn key", "def _get_password(self):\n return self._password", "def _get_password(self, service_name, username, reenter=False):\n\n password_from_keyring = None\n if reenter is False:\n try:\n password_from_keyring = keyring.get_password(\n service_name, username)\n except keyring.errors.KeyringError as exc:\n log.warning(\"Failed to get a valid keyring for password \"\n \"storage: {}\".format(exc))\n\n if password_from_keyring is None:\n log.warning(\"No password was found in the keychain for the \"\n \"provided username.\")\n if system_tools.in_ipynb():\n log.warning(\"You may be using an ipython notebook:\"\n \" the password form will appear in your terminal.\")\n password = getpass.getpass(\"{0}, enter your password:\\n\"\n .format(username))\n else:\n password = password_from_keyring\n\n return password, password_from_keyring", "def get_credentials_from_file(creds_type) -> str:\n with open(\n f\"{TEST_DATA}/auth-basic-auth-mergeable/credentials/auth-basic-auth-{creds_type}-credentials.txt\"\n ) as credentials_file:\n return credentials_file.read().replace(\"\\n\", \"\")", "def credentials_from_rc(self):\n mprc_filename = os.environ[\"HOME\"]+'/.mofplusrc'\n with open(mprc_filename, 'r') as mprc:\n username = mprc.readline().split()[0]\n pw = mprc.readline().split()[0]\n return username, pw", "def _loadCredentials(self):\r\n with open(self.filename) as f:\r\n for line in f:\r\n try:\r\n parts = 
self.scanner.match(line).groups()\r\n except AttributeError:\r\n raise CredentialError('Credential database corrupted')\r\n try:\r\n yield parts[0], UserInfo(parts[1], int(parts[2]),\r\n set(parts[3].split(':')))\r\n except KeyError:\r\n raise CredentialError('Credential database corrupted')", "def get_user_password(text):\n return getpass.getpass(text)" ]
[ "0.6575987", "0.6558546", "0.64441496", "0.6166849", "0.60289574", "0.600363", "0.5950623", "0.59272367", "0.59127575", "0.5835766", "0.5814732", "0.57961124", "0.5790752", "0.5773013", "0.5772841", "0.5765816", "0.57390904", "0.5737546", "0.5721898", "0.56895983", "0.5678449", "0.56635416", "0.56409097", "0.56155765", "0.5613415", "0.56104666", "0.56045145", "0.5599942", "0.5580903", "0.5579324", "0.5577684", "0.5568975", "0.55578446", "0.55578446", "0.5551828", "0.5538477", "0.55380344", "0.5528041", "0.55227387", "0.55002075", "0.5483669", "0.5472972", "0.5454671", "0.5453537", "0.5435068", "0.54306275", "0.54305786", "0.5417566", "0.5391132", "0.53857976", "0.53810763", "0.53366953", "0.5322101", "0.52919096", "0.5286928", "0.5286884", "0.52847594", "0.52723867", "0.5262124", "0.52581424", "0.5247047", "0.520988", "0.5195067", "0.51900107", "0.5157299", "0.5156336", "0.51546603", "0.51414967", "0.5140846", "0.51404846", "0.51398355", "0.5131872", "0.5130792", "0.51155055", "0.51147705", "0.51049185", "0.50958335", "0.5086977", "0.5086502", "0.5082036", "0.5081088", "0.5081063", "0.507538", "0.50723094", "0.50719297", "0.50675124", "0.5065943", "0.50643617", "0.50638497", "0.50611466", "0.50595814", "0.50527036", "0.5033332", "0.5030643", "0.5030484", "0.50258523", "0.5019566", "0.50193685", "0.5019149", "0.5013486" ]
0.807333
0
Parse list of strings, separated by c.
def loop_escaped(val, c):
    if not val:
        val = ''
    val = as_unicode(val)
    rc = re.compile(r'([^%s\\]|\\.)*' % re.escape(c))
    pos = 0
    while pos < len(val):
        if val[pos] == c:
            pos += 1
            continue
        m = rc.match(val, pos)
        if not m:
            raise Exception('rx bug')
        pos = m.end()
        yield unescape(m.group(0))
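A minimal usage sketch for the generator above, assuming as_unicode and unescape are the source module's own helpers (not shown in this record) that coerce input to text and strip backslash escapes; the stand-ins below are hypothetical:

import re

# Hypothetical stand-ins for the module's real helpers (assumption).
def as_unicode(v):
    return v.decode('utf-8') if isinstance(v, bytes) else v

def unescape(s):
    # Drop one level of backslash escaping: '\,' -> ','
    return re.sub(r'\\(.)', r'\1', s)

# A backslash-escaped separator stays inside its segment:
print(list(loop_escaped(r'a,b\,c,d', ',')))   # ['a', 'b,c', 'd']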
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse_list(slist):\n res = []\n for v in loop_escaped(slist, ','):\n v = v.strip()\n if v:\n res.append(v)\n return res", "def parse_list(value: str) -> list[str]:\n segments = _QUOTED_SEGMENT_RE.findall(value)\n for segment in segments:\n left, match, right = value.partition(segment)\n value = ''.join([left, match.replace(',', '\\000'), right])\n return [_dequote(x.strip()).replace('\\000', ',') for x in value.split(',')]", "def parse_list_str(setting_str):\n return re.split('\\s*,\\s*', setting_str)", "def parse_string_list(data):\n txt = data.decode()\n x = ast.literal_eval(txt)\n return x", "def split_cdl(cdl_string):\n return [x.strip() for x in cdl_string.split(',')]", "def parse_commands(command_list: List[str]) -> List[str]:\n return [' '.join(x.split('-')) for x in command_list]", "def parsePresetStrings(ps_list):\n\n return [parsePreset(ps) for ps in ps_list]", "def parse_list(l):\n\n if not l: return []\n return uniq([k.strip() for k in l.split() if len(k.strip()) > 0])", "def fromList(cls, list):\n obj = CIGAR()\n if list == ['*']:\n obj._tokens = '*'\n else:\n if not all( type(e) == tuple \n and len(e) == 2 \n and type(e[0]) == int \n and e[0] >= 1 \n and type(e[1]) == str \n and len(e[1]) == 1 \n and e[1] in 'MIDNSHPX=' for e in list):\n raise ValueError('Invalid list to form CIGAR string')\n\n obj._tokens = list\n obj._changed = True\n obj.compact()\n return obj", "def from_list(l):\n if isinstance(l, str):\n for special_char in (' ', '\\n', '\\t', '(', ')', '\\\"'):\n if special_char in l:\n return '\\\"' + l + '\\\"'\n return l\n return '(' + ' '.join(from_list(e) for e in l) + ')'", "def string_list(s):\n\n if not isinstance(s, str):\n raise ValueError(f\"Not a string: {s!r}\")\n return [p for p in [part.strip() for part in s.split(\",\")] if p]", "def _parse_list(string, dtype=int, delimiter=','):\n\n items = string.lower().strip().replace(' ', '').split(delimiter)\n\n if 'none' in items:\n items.pop(items.index('none'))\n contains_none = True\n else:\n contains_none = False\n\n\n if dtype == bool:\n items = [item == 'true' for item in items]\n else:\n items = [dtype(item) for item in items]\n\n if contains_none:\n items.append(None)\n\n return items", "def _split_input_list(str_list):\r\n\r\n new_list = re.split(r'[\\n\\r\\s,]', str_list)\r\n new_list = [s.strip() for s in new_list]\r\n new_list = [s for s in new_list if s != '']\r\n\r\n return new_list", "def commaStringParse(string):\n dels = []\n cur = \"\"\n length = len(string)\n for c in string:\n # skip spaces outside words\n if c == \" \" and cur == \"\":\n continue\n # new delegation found\n elif c == \",\":\n dels.append(cur)\n cur = \"\"\n # last name in list\n elif string.index(c) == length - 1:\n cur += c\n dels.append(cur)\n else:\n cur += c\n return dels", "def parse_list(tokens: deque) -> list:\n # Exemplo de implementação...\n\n # Consome o colchete de abertura\n if tokens.popleft() != \"[\":\n raise SyntaxError\n\n # Verifica se corresponde à uma lista vazia\n elif tokens[0] == \"]\":\n tokens.popleft()\n return []\n\n # Consome os valores\n xs = []\n while True:\n # Lê valor e salva na saída\n x = parse_value(tokens)\n xs.append(x)\n\n # Verifica fim da lista e remove vírgula se necessário\n tk = tokens.popleft()\n if tk == \"]\":\n break\n elif tk != \",\":\n raise SyntaxError(\"token inesperada em lista: %r\" % tk)\n\n return xs", "def arg_parse_list(text, j):\n\n depth = 0\n loc2 = j\n arglist = []\n prev_start = j\n while 1:\n if text[loc2] == \"(\":\n if depth == 0:\n prev_start = loc2 + 
1\n depth = depth + 1\n\n elif text[loc2] == \")\":\n depth = depth - 1\n if depth == 0:\n arglist.append(text[prev_start:loc2].strip())\n break\n\n elif text[loc2] == \",\":\n if depth == 1:\n arglist.append(text[prev_start:loc2].strip())\n prev_start = loc2 + 1\n elif text[loc2] == \"{\":\n depth = depth + 1\n elif text[loc2] == \"}\":\n depth = depth - 1\n loc2 = loc2 + 1\n return arglist", "def parsing(l):\r\n l_p = []\r\n for i in range(0,len(l),3):\r\n l_p.append(l[i:i+3])\r\n return l_p", "def parse(name: unicode) -> List[unicode]:\n ...", "def parse_list(string, dtype):\n # l = string.replace('[', '').replace(']', '').replace(' ', '').split(',')\n s = string.replace(' ', '') # remove all spaces first\n if s[0] == '[': # it's not only a single item\n s = s[1:-1] # remove [ and ] from start and end only\n else: # it's just a single item\n return dtype(s)\n if s[0] == '[': # it's a list of lists\n splitted = s.split('],')\n for i in range(len(splitted)-1):\n splitted[i] += ']' # splitting removed the closing bracket from all but the last item\n l = list(map(lambda x: parse_list(x, dtype), splitted))\n else:\n splitted = s.split(',')\n l = list(map(dtype, splitted))\n return l", "def separate_list_input(input_: str) -> List[str]:\n no_commas = input_.replace(\",\", \" \")\n # Each string is naturally unicode, this causes problems with M2Crypto SANs\n # TODO: check if above is still true when M2Crypto is gone ^\n return [str(string) for string in no_commas.split()]", "def __parse_string_for_delimiter__(self, data):\n parsed = []\n for row in data:\n row = self.__remove_break_line__(row)\n row = self.__split_for_delimiter__(row)\n parsed.append(row)\n return parsed", "def __parse_list(self) -> list:\r\n self.idx += 1\r\n l = []\r\n while self.data[self.idx: self.idx + 1] != b'e':\r\n l.append(self.__parse())\r\n self.idx += 1\r\n return l", "def multiSplit(stringList, tokenList=[\" \"]):\r\n if not stringList: return []\r\n if isinstance(tokenList, basestring):\r\n tokenList = [tokenList]\r\n if isinstance(stringList, basestring):\r\n stringList = [stringList]\r\n rtnList = stringList\r\n for token in tokenList:\r\n rtnList = rtnList[:]\r\n for string in rtnList:\r\n if string.find(token) > -1:\r\n rtnList.remove(string)\r\n names = string.split(token)\r\n for name in names:\r\n name = name.strip()\r\n if name:\r\n rtnList.append(name)\r\n return rtnList", "def parse(input):\n return [l.strip() for l in input.splitlines() if l.strip()]", "def split_on_commas(string):\n items = []\n char_buffer = []\n openings = []\n for i, char in enumerate(string):\n if char == ',' and len(openings) == 0:\n items.append(\"\".join(char_buffer))\n char_buffer = []\n continue\n elif char == ' ' and len(char_buffer) == 0:\n continue\n elif char == '(' or char == '[':\n openings.append(char)\n elif char == ')':\n if openings.pop() != '(':\n raise Exception('Invalid bracket end \")\", col {}.'.format(i))\n elif char == ']':\n if openings.pop() != '[':\n raise Exception('Invalid bracket end \"]\", col {}.'.format(i))\n char_buffer.append(char)\n items.append(\"\".join(char_buffer))\n return items", "def str_list_works(x):\n import ast\n x = ast.literal_eval(x)\n x = [n.strip() for n in x]\n return (x)", "def list_of_strings_to_c_string_array(l):\n c_strings = (ctypes.c_char_p*(len(l)))()\n for i, s in enumerate(l):\n if s == None:\n c_strings[i] = None\n else:\n # create_string_buffer() handles conversion\n c_strings[i] = ctypes.create_string_buffer(strtobytes(s)).value\n return c_strings", "def 
readStringList( Text, ItemSeparator = ';' ):\n ValuesList = []\n try:\n if Text.find(ItemSeparator) >= 0: \n ValuesList = Text.strip().split(ItemSeparator)\n except:\n pass\n return ValuesList", "def cs_string_to_typed_list(cs_str: str, sep=\",\", type_conv_fcn=float):\n try:\n list_strings = cs_str.split(sep)\n if all(map(lambda s: s.strip() == '', cs_str.split(sep))):\n # we are getting a list of empty strings we return [] and do not print warning\n return []\n return list([type_conv_fcn(x) for x in list_strings])\n except:\n warnings.warn('Could not convert string {s} to a typed list'.format(s=cs_str))\n return []", "def _parseVec(self, str):\r\n\t\tvec = []\r\n\t\tsplt = str.split()\r\n\t\tfor i in range(0,len(splt)):\r\n\t\t\tvec.append(self._parseNumber(splt[i]))\r\n\t\treturn vec", "def str2list(parser: Callable[[str], Any]) -> Callable[[str], List[Any]]:\n\n def _parse(string: str) -> List[Any]:\n return [parser(entry) for entry in string.split()]\n\n return _parse", "def add_parsed_strings(self, rule_name, str_name, str_list):\n\n str_id = 0\n\n for parsed_str in str_list:\n self.add_text_string(\n rule_name,\n parsed_str,\n name=str_name + str(str_id),\n modifiers=[\"ascii\", \"wide\"],\n )\n\n str_id += 1\n\n return True", "def name_list(string):\n names = []\n for name in string.split('; '):\n if ', ' in name:\n last_comma_first = name.split(', ', 2)\n first = last_comma_first[1].strip()\n last = last_comma_first[0].strip()\n names.append(first + \" \" + last)\n else:\n names.append(name.strip())\n return names", "def parse_comma_separated_list(\n value: str, regexp: Pattern[str] = COMMA_SEPARATED_LIST_RE\n) -> list[str]:\n assert isinstance(value, str), value\n\n separated = regexp.split(value)\n item_gen = (item.strip() for item in separated)\n return [item for item in item_gen if item]", "def parse_command_list(config_str):\n return [command for command in config_str.splitlines() if command]", "def _parse_qualified_list(value: str) -> list[str]:\n found_wildcard = False\n values, rejected_values = [], []\n parsed = parse_list(value)\n default = float(len(parsed) + 1)\n highest = default + 1.0\n for raw_str in parsed:\n charset, _, parameter_str = raw_str.replace(' ', '').partition(';')\n if charset == '*':\n found_wildcard = True\n continue\n params = dict(_parse_parameter_list(parameter_str.split(';')))\n quality = float(params.pop('q', default))\n if quality < 0.001:\n rejected_values.append(charset)\n elif quality == 1.0:\n values.append((highest + default, charset))\n else:\n values.append((quality, charset))\n default -= 1.0\n parsed = [value[1] for value in sorted(values, reverse=True)]\n if found_wildcard:\n parsed.append('*')\n parsed.extend(rejected_values)\n return parsed", "def test_string_to_list_string_delimiter(self):\n assert_equals(\n str_to_list(' a | b | c ', delimiter='|'),\n ['a', 'b', 'c']\n )", "def _split_mesy_list(string):\n init_list = [i.strip() for i in string.split(',') if i]\n final_list = []\n for i in init_list:\n if i.isspace():\n continue\n andlist = i.split('and')\n amplist = i.split('&')\n if len(andlist) > 1:\n for j in andlist:\n if not j or j.isspace():\n continue\n final_list.append(j.strip())\n elif len(amplist) > 1:\n for j in amplist:\n if not j or j.isspace():\n continue\n final_list.append(j.strip())\n else:\n final_list.append(i.strip())\n final_list = [i.strip() for i in final_list if not i.isspace()]\n return [i for i in final_list if i]", "def parse_list(specs):\n if not specs:\n return []\n if isinstance(specs, 
six.string_types):\n specs = specs.split(',')\n return [TagPattern.parse(spec) for spec in specs]", "def cdd_convert(string, field=self.field()):\n return [field(x) for x in string.split()]", "def split_string(source,splitlist):\n\tspaces = \" \" * len(splitlist)\n\ttranstable = string.maketrans(splitlist, spaces)\n\tsource = string.translate(source, transtable)\n\treturn source.split()", "def parse(self) -> List[List[Union[str,int]]]:\n return self.__create_list(cp(self.tokens))", "def parse(self):\n if len(self._content) == 0:\n return []\n\n groups = self._content.split(\",\")\n arr = set()\n\n def func(acc, cpu):\n if ListFormatParser._is_range(cpu):\n acc.update(ListFormatParser._range_to_list(cpu))\n else:\n acc.add(int(cpu))\n return acc\n\n return list(functools.reduce(func, groups, arr))", "def clean_commas(song_list: List[str]) -> List[str]:\n res = []\n for idx, line in enumerate(song_list):\n if line[-1] == ',':\n if idx + 1 >= len(song_list) or song_list[idx + 1] == '':\n line = line[:-1]\n res.append(line)\n return res", "def test_string_to_list_string(self):\n assert_equals(\n str_to_list('a, b, c'),\n ['a', 'b', 'c']\n )", "def parse(self, output):\n q = '['\n prepro = [line for line in output if \"['''', ''''],\" not in line]\n prepro = [line for line in prepro if \"[',', ','],\" not in line]\n for line in prepro:\n process = line.strip()\n process = process.replace('[', '(')\n process = process.replace(']', ')')\n if '?' in process:\n q += '], ['\n else:\n q += process\n q += \"('?','.') ]\"\n # Evaluation of a string with a list format\n return ast.literal_eval(q)", "def split(self) -> List[String]:\n pass", "def parse3DList(self,string):\r\n string = string.replace(\"[\",\"\")\r\n string = string.replace(\"]],\", \"**\")\r\n string = string.replace(\"],\",\"*\")\r\n string = string.replace(\"]\", \"\")\r\n string = string.split(\"**\")\r\n temp = []\r\n for i in string:\r\n temp.append(i.split(\"*\"))\r\n string = copy.deepcopy(temp)\r\n for i in xrange(len(string)):\r\n for j in xrange(len(string[i])):\r\n string[i][j] = string[i][j].split(\",\")\r\n for i in xrange(len(string)):\r\n for j in xrange(len(string[i])):\r\n for k in xrange(len(string[i][j])):\r\n string[i][j][k] = float(string[i][j][k])\r\n string[i][j] = list(string[i][j])\r\n return string", "def _parse_parameter_list(\n parameter_list: abc.Iterable[str],\n normalize_parameter_names: bool = False,\n normalize_parameter_values: bool = True,\n strip_interior_whitespace: bool = False) -> list[tuple[str, str]]:\n parameters = []\n for param in parameter_list:\n param = param.strip()\n if param:\n name, value = param.split('=')\n if strip_interior_whitespace:\n name, value = name.strip(), value.strip()\n if normalize_parameter_names:\n name = name.lower()\n if normalize_parameter_values:\n value = value.lower()\n parameters.append((name, _dequote(value.strip())))\n return parameters", "def parse_list_header(value):\n result = []\n for item in urllib2.parse_http_list(value):\n if item[:1] == item[-1:] == '\"':\n item = unquote_header_value(item[1:-1])\n result.append(item)\n return result", "def _parse_list_of_lists(string, delimiter_elements=',', delimiter_lists=':', delimiter_pipelines=';', dtype=float):\n new_list = []\n for sub_list in string.strip().replace(' ', '').split(delimiter_pipelines):\n if delimiter_lists in sub_list:\n new_list.append([_parse_list(item, dtype=dtype, delimiter=delimiter_elements) for item in sub_list.split(delimiter_lists)])\n else:\n new_list.append(_parse_list(sub_list, 
dtype=dtype, delimiter=delimiter_elements))\n return new_list", "def _fmt_list(self, string_list):\n return self._fmt_csv(string_list, list_braces=\"[]\")", "def test_string_to_list_string_delimiter(self):\n\n assert_equals(\n str_to_list(' a | b | c ', delimiter='|'),\n ['a', 'b', 'c']\n )", "def process_list_arg(arg):\n if isinstance(arg, list):\n return arg\n elif isinstance(arg, basestring):\n args = []\n for part in arg.split(\",\"):\n args.append(part.strip())\n return args", "def test_list_representation(self):\n \n lr = ['- L1\\n- L2\\n- L3',\n 'text\\n- L1\\n- L2\\ntext\\n- L3',\n '* H\\n- L1\\n - L2\\n** H\\n- L3',\n ' - L1\\n - L2\\n - L3',\n '- L1\\n - L2\\n - L3'\n ]\n\n for l in lr:\n self.assertEqual(l, str(parser.parse(l)))", "def parse_line(line):\n tok = line.replace('[', '').replace(']', '').split(', ')\n list = [tok[0]]\n tok = tok[1:]\n temp = list\n parse_line_helper(temp, tok)\n return list", "def tokenlist(sep, item):\n return item + ZeroOrMore(sep + item) + Optional(sep)", "def parse_tags(s: str) -> List[str]:\n tags = []\n buf = []\n in_quoted = None\n\n for c in s:\n if in_quoted:\n if c == in_quoted:\n in_quoted = None\n else:\n buf.append(c)\n elif c == '\"' or c == '\\'':\n in_quoted = c\n elif c == ',':\n if buf:\n tag = ''.join(buf).strip()\n if tag:\n tags.append(tag)\n buf.clear()\n else:\n buf.append(c)\n\n if buf:\n tag = ''.join(buf).strip()\n if tag:\n tags.append(tag)\n\n return tags", "def string_list(out, name, items):\n print(f\"const char* const {name}[] = {{\", file=out)\n for item in items:\n print(f\" \\\"{item}\\\",\", file=out)\n print(\" nullptr,\", file=out)\n print(\"};\", file=out)\n print(\"\", file=out)\n pass", "def parse_list(list_str):\n return list(map(int, re.findall(r'\\d+', list_str)))", "def stringInputToList(x):\n return list(filter(None, [y.strip() for y in x.split(',')]))", "def tokenize(self, input_string: str) -> List[str]:", "def __call__(self, string, include_gd=True): # -> \"TokenList\":\r\n self.string = string\r\n return [x for x in self.nextToken(include_gd)]", "def getlist(self, option, sep=',', chars=None):\n return [chunk.strip(chars) for chunk in option.split(sep)]", "def parseInput(input, delimiter='|'):\n return input.split(delimiter)", "def test_string_to_list_string(self):\n\n assert_equals(\n str_to_list('a, b, c'),\n ['a', 'b', 'c']\n )", "def parse_list(entry, separator):\n r = [x for x in entry.split(separator)] if entry else None\n\n # for lists with only a single element, return just the element\n if isinstance(r, list) and len(r) == 1:\n return r[0]\n else:\n return r", "def split(a):\r\n compos = [-1] # compos stores the positions of the relevant commas in the argument string\r\n compos.extend(t[2][1] for t in generate_tokens(StringIO(a).readline) if t[1] == ',')\r\n compos.append(len(a))\r\n return [ a[compos[i]+1:compos[i+1]] for i in xrange(len(compos)-1)]", "def __call__(self, string, include_gd=True): # -> \"TokenList\":\r\n self.load(string)\r\n result = []\r\n while True:\r\n try:\r\n result.append(self.nextToken(include_gd))\r\n except:\r\n break\r\n return result", "def strToStrList(x):\n if type(x)==str:\n return x[2:-2].split(\"', '\")", "def decode(self, s):\n lststr = s.split(',')\n if s=='': return []\n rst = []\n for i in range(len(lststr)):\n rst.append(lststr[i])\n return rst", "def __parse_line(moves: str, start: int) -> List[str]:\n\n return [moves[start + j] for j in range(SIZE)]", "def list_option(s):\n return _convert(s, (list, tuple))", "def parser(sent_list): #input: list of 
sentences", "def test_get_items_from_string() -> None:\n assert [\"i\", \"p\"] == common_util.get_items_from_string(\"i, ,p\")\n assert [\"i\", \"p\"] == common_util.get_items_from_string(\"i- -p\", separator=\"-\")\n assert [\"i\", \" \", \" p\"] == common_util.get_items_from_string(\"i, , p\", remove_blanks=False)\n assert [\"i\", \"p\"] == common_util.get_items_from_string(\"i, , p\")\n assert [] == common_util.get_items_from_string(\"\")", "def parse1DList(self,string):\r\n string = string.replace(\"[\",\"\")\r\n string = string.replace(\"]\",\"\")\r\n string = string.split(\",\")\r\n for i in xrange(len(string)):\r\n string[i] = float(string[i])\r\n string = list(string)\r\n return string", "def _parse_params(members_list):\n return [literal_eval(p.strip()) for p in members_list]", "def format_string_to_list(self, avi_string):\n\n repls = ('[', ''), (']', ''), (\"'\", \"\")\n avi_string = reduce(lambda a, kv: a.replace(*kv), repls, avi_string)\n return avi_string.split(',')", "def _parse_comments(s):\n i = iter(s.split(\",\"))\n\n rv = []\n try:\n while True:\n # get the flags and text of a comment part\n flags, text = next(i).split(':', 1)\n\n if len(flags) == 0:\n rv.append(('OTHER', text, text, text, \"\"))\n # parse 3-part comment, but ignore those with O flag\n elif 's' in flags and 'O' not in flags:\n ctriple = [\"TRIPLE\"]\n indent = \"\"\n\n if flags[-1] in string.digits:\n indent = \" \" * int(flags[-1])\n ctriple.append(text)\n\n flags, text = next(i).split(':', 1)\n assert flags[0] == 'm'\n ctriple.append(text)\n\n flags, text = next(i).split(':', 1)\n assert flags[0] == 'e'\n ctriple.append(text)\n ctriple.append(indent)\n\n rv.append(ctriple)\n elif 'b' in flags:\n if len(text) == 1:\n rv.insert(0, (\"SINGLE_CHAR\", text, text, text, \"\"))\n except StopIteration:\n return rv", "def string_to_list(string: str, sep: str):\n return string.strip(\"][\").split(sep)", "def parse(name: unicode, ignoreLeaderParens: bool) -> List[unicode]:\n ...", "def _concatenated_list(s):\n m = LIST_CONCAT_P.match(s.strip())\n if not m:\n raise ValueError(s)\n maybe_list = _decode_flag_val(m.group(1))\n if isinstance(maybe_list, list):\n return maybe_list * int(m.group(2))\n return s", "def _parse_aux_codes(cls, aux_codes_str: str) -> list[str]:\n try:\n tokens = cls._auxiliary_codes.parseString(aux_codes_str)\n except pyparsing.ParseException as exc:\n raise ValueError(\n f\"Could not parse: {aux_codes_str!r}, error: {exc.msg},\"\n f\" error at char {exc.loc}\"\n ) from None\n return list(tokens)", "def parse_str( s: str ) -> list:\n\n tree = ET.fromstring( s )\n if tree is None: return None\n return parse_tree( tree )", "def _convert_str_to_list(cls, v: Union[List[str], str]) -> List[str]:\n if isinstance(v, str):\n return v.split(\",\")\n return v # cov: ignore", "def base_parsing(lines):\n lines = [l.strip() for l in lines]\n return [ tuple(line.split(sep='-')) for line in lines ]", "def get_cpd_ids(string):\n return [x for x in string.split(\" \") if x.startswith(\"C\")]", "def parse_urls(start_urls: Union[str, List[str]]) -> List[str]:\n if not (\n (type(start_urls) == str)\n or ((type(start_urls) == list) and (all([type(x) == str for x in start_urls])))\n ):\n error_message = (\n \"Argument 'start_urls' need to be of type list or (comma-separated) string.\"\n )\n logger.error(error_message)\n raise ValueError(error_message)\n\n return start_urls.split(\",\") if type(start_urls) == str else start_urls # type: ignore", "def parse_data(lst):\n\tfinallist = []\n\tfor element in 
lst:\n\t\tfinallist.append(parse_string_to_listint(element))\n\treturn finallist", "def date_parser(dates):\n return([item.split()[0] for item in dates])\n pass", "def parseGoalList(s):\n return map(Parser._convertGoal, goalListNT.parseString(s).asList())", "def _strings_to_list(one_or_more_strings):\n if isinstance(one_or_more_strings, str):\n return [one_or_more_strings]\n else:\n return list(one_or_more_strings)", "def strToList(x):\n if type(x)==str:\n return x[2:-2].split(\"', '\")", "def parse_crn_string(data):\n crn_document = crn_document_setup()\n return _post_process(crn_document.parseString(data).asList())", "def _tokenize_by_commas(string: str) -> Optional[List[str]]:\n if not string:\n return None\n\n quoted_comma_ranges = [range(m.start(0), m.end(0)) for m in QUOTED_WORD_SYNTAX.finditer(string)]\n\n def clean(s: str) -> str:\n s = s.strip() # whitespace\n if len(s) > 0 and s[0] == '\"' and s[len(s) - 1] == '\"': # surrounding quotes\n s = s[1:-1]\n if len(s) > 0 and s[0] == \"'\" and s[len(s) - 1] == \"'\":\n s = s[1:-1]\n return s\n\n block_start_index = 0\n search_start_index = block_start_index\n tokens = []\n index = string.find(\",\", search_start_index)\n while index > 0:\n is_quoted = False\n for quoted_comma_range in quoted_comma_ranges:\n if index in quoted_comma_range:\n is_quoted = True\n break\n if is_quoted:\n search_start_index = index + 1\n else:\n tokens.append(clean(string[block_start_index:index]))\n block_start_index = index + 1\n search_start_index = block_start_index\n index = string.find(\",\", search_start_index)\n\n if block_start_index < len(string):\n tokens.append(clean(string[block_start_index:]))\n return tokens", "def split_cmdline_filter_items(string):\n filter_items = string.split(',')\n return filter_items", "def parseArr(s) :\n\n rc = []\n if s.startswith('[') and s.endswith(']') :\n s = s[1:-1]\n z = s.split(',')\n for p in z :\n if p.find('..') >= 0 :\n zz = p.split('..')\n if len(zz)==2 :\n b = str2raw(zz[0])\n e = str2raw(zz[1])\n b = safe2Int(b)\n e = safe2Int(e)\n if not b==None and not e==None and (e >= e):\n for i in range(b,e+1) :\n rc.append(str(i))\n\n else :\n p = str2raw(p)\n rc.append(str(p))\n pass\n return rc", "def from_str(cls, string):\n # If quotes are found, parse it as a Python string literal after adding\n # brackets around\n if '\"' in string or \"'\" in string:\n string = '[' + string + ']'\n l = ast.literal_eval(string)\n return [str(x) for x in l]\n # Otherwise, just split on commas\n else:\n return string.split(',')", "def _parse_emails(self, emails):\n return [e.strip() for e in emails.split(',')]", "def _cast_strlist_to_C(py_strlist):\n c_strarr = (str_t * len(py_strlist))()\n c_strarr[:] = py_strlist\n return c_strarr", "def _fmt_csv(string_list, list_braces = None):\n if len(string_list) == 0:\n return \"\"\n first = True\n str_ = \"\"\n if list_braces != None:\n str_ += list_braces[0]\n for string in string_list:\n if string != None:\n if first:\n first = False\n else:\n str_ += \", \"\n str_ += string\n if list_braces != None:\n str_ += list_braces[1]\n return str_" ]
[ "0.6619962", "0.6402648", "0.61849755", "0.6105302", "0.6098038", "0.6075494", "0.60474575", "0.5930806", "0.59285104", "0.5884096", "0.5826036", "0.58210325", "0.58042055", "0.5785377", "0.57437575", "0.57404375", "0.5691565", "0.56817746", "0.5671865", "0.56397724", "0.5596686", "0.559299", "0.556531", "0.55651313", "0.5546184", "0.55406094", "0.5522222", "0.5498667", "0.5492943", "0.5476297", "0.54583913", "0.5439237", "0.54371357", "0.54192185", "0.541605", "0.54080176", "0.53826255", "0.5379464", "0.53624517", "0.5356311", "0.53453404", "0.534439", "0.533354", "0.5325835", "0.5317699", "0.53137445", "0.5313031", "0.5311814", "0.5308274", "0.53025395", "0.5299365", "0.5290845", "0.52885264", "0.5285911", "0.52830774", "0.52808076", "0.52640414", "0.5257639", "0.5242991", "0.52428806", "0.52376807", "0.52345425", "0.52224386", "0.5215462", "0.5207058", "0.5203697", "0.51891553", "0.51886755", "0.518684", "0.51817256", "0.5174194", "0.5168895", "0.5153211", "0.5146093", "0.5142866", "0.51399547", "0.51324016", "0.51311284", "0.5130162", "0.51291734", "0.512728", "0.5125423", "0.5114183", "0.51141745", "0.51128155", "0.51075274", "0.5106965", "0.510448", "0.50997686", "0.5088745", "0.5084809", "0.50825053", "0.5072845", "0.5071777", "0.5071135", "0.507054", "0.50682557", "0.5064575", "0.5061023", "0.5052853", "0.5043362" ]
0.0
-1
Parse comma-separated list to strings.
def parse_list(slist):
    res = []
    for v in loop_escaped(slist, ','):
        v = v.strip()
        if v:
            res.append(v)
    return res
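A quick sketch of the expected behavior, reusing loop_escaped from the previous record: blank segments are dropped and escaped commas survive inside their element.

print(parse_list(r'one, two\,with-comma , ,three'))
# ['one', 'two,with-comma', 'three']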
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse(self, output):\n q = '['\n prepro = [line for line in output if \"['''', ''''],\" not in line]\n prepro = [line for line in prepro if \"[',', ','],\" not in line]\n for line in prepro:\n process = line.strip()\n process = process.replace('[', '(')\n process = process.replace(']', ')')\n if '?' in process:\n q += '], ['\n else:\n q += process\n q += \"('?','.') ]\"\n # Evaluation of a string with a list format\n return ast.literal_eval(q)", "def parse_string_list(data):\n txt = data.decode()\n x = ast.literal_eval(txt)\n return x", "def test_list_representation(self):\n \n lr = ['- L1\\n- L2\\n- L3',\n 'text\\n- L1\\n- L2\\ntext\\n- L3',\n '* H\\n- L1\\n - L2\\n** H\\n- L3',\n ' - L1\\n - L2\\n - L3',\n '- L1\\n - L2\\n - L3'\n ]\n\n for l in lr:\n self.assertEqual(l, str(parser.parse(l)))", "def parse_list(value: str) -> list[str]:\n segments = _QUOTED_SEGMENT_RE.findall(value)\n for segment in segments:\n left, match, right = value.partition(segment)\n value = ''.join([left, match.replace(',', '\\000'), right])\n return [_dequote(x.strip()).replace('\\000', ',') for x in value.split(',')]", "def parse_order_info(self):\n\n list_parse = self.info.strip().split(',')\n orders_list = []\n\n for single_order in list_parse:\n single_order_array = single_order.split(';')\n orders_list.append(single_order_array)\n single_order_list = []\n\n return orders_list", "def split_commaseparated_tags(cls, commaseparatedtags):\n if commaseparatedtags.strip() == '':\n return []\n else:\n return [\n cls.normalize_tag(tagstring)\n for tagstring in list([_f for _f in re.split(r'[,\\s]', commaseparatedtags) if _f])]", "def _parse_parameter_list(\n parameter_list: abc.Iterable[str],\n normalize_parameter_names: bool = False,\n normalize_parameter_values: bool = True,\n strip_interior_whitespace: bool = False) -> list[tuple[str, str]]:\n parameters = []\n for param in parameter_list:\n param = param.strip()\n if param:\n name, value = param.split('=')\n if strip_interior_whitespace:\n name, value = name.strip(), value.strip()\n if normalize_parameter_names:\n name = name.lower()\n if normalize_parameter_values:\n value = value.lower()\n parameters.append((name, _dequote(value.strip())))\n return parameters", "def from_list(l):\n if isinstance(l, str):\n for special_char in (' ', '\\n', '\\t', '(', ')', '\\\"'):\n if special_char in l:\n return '\\\"' + l + '\\\"'\n return l\n return '(' + ' '.join(from_list(e) for e in l) + ')'", "def parse_commands(command_list: List[str]) -> List[str]:\n return [' '.join(x.split('-')) for x in command_list]", "def process_list_arg(arg):\n if isinstance(arg, list):\n return arg\n elif isinstance(arg, basestring):\n args = []\n for part in arg.split(\",\"):\n args.append(part.strip())\n return args", "def str_list_works(x):\n import ast\n x = ast.literal_eval(x)\n x = [n.strip() for n in x]\n return (x)", "def parseOcrNumList(self, ocrList):\n outStr = \"\"\n\n for numeral in ocrList:\n outStr += (str(numeral.numeralValue))\n\n return outStr", "def listify(item, do_strip=False):\n if not item:\n return []\n elif isinstance(item, list):\n return item\n elif isinstance(item, string_types) and item.count(','):\n if do_strip:\n return [token.strip() for token in item.split(',')]\n else:\n return item.split(',')\n else:\n return [item]", "def clean_commas(song_list: List[str]) -> List[str]:\n res = []\n for idx, line in enumerate(song_list):\n if line[-1] == ',':\n if idx + 1 >= len(song_list) or song_list[idx + 1] == '':\n line = line[:-1]\n res.append(line)\n return 
res", "def parse_list(tokens: deque) -> list:\n # Exemplo de implementação...\n\n # Consome o colchete de abertura\n if tokens.popleft() != \"[\":\n raise SyntaxError\n\n # Verifica se corresponde à uma lista vazia\n elif tokens[0] == \"]\":\n tokens.popleft()\n return []\n\n # Consome os valores\n xs = []\n while True:\n # Lê valor e salva na saída\n x = parse_value(tokens)\n xs.append(x)\n\n # Verifica fim da lista e remove vírgula se necessário\n tk = tokens.popleft()\n if tk == \"]\":\n break\n elif tk != \",\":\n raise SyntaxError(\"token inesperada em lista: %r\" % tk)\n\n return xs", "def parse_data(lst):\n\tfinallist = []\n\tfor element in lst:\n\t\tfinallist.append(parse_string_to_listint(element))\n\treturn finallist", "def process_list(a_list: list):\n\n return ', '.join(str(s) for s in a_list) if a_list else Presenter.DEFAULT", "def parse_list(l):\n\n if not l: return []\n return uniq([k.strip() for k in l.split() if len(k.strip()) > 0])", "def _fmt_csv(string_list, list_braces = None):\n if len(string_list) == 0:\n return \"\"\n first = True\n str_ = \"\"\n if list_braces != None:\n str_ += list_braces[0]\n for string in string_list:\n if string != None:\n if first:\n first = False\n else:\n str_ += \", \"\n str_ += string\n if list_braces != None:\n str_ += list_braces[1]\n return str_", "def format_list(list):\n return \" \".join(str(tok) for tok in list)", "def parse(self):\n if len(self._content) == 0:\n return []\n\n groups = self._content.split(\",\")\n arr = set()\n\n def func(acc, cpu):\n if ListFormatParser._is_range(cpu):\n acc.update(ListFormatParser._range_to_list(cpu))\n else:\n acc.add(int(cpu))\n return acc\n\n return list(functools.reduce(func, groups, arr))", "def str_list(x):\n #import ast\n #x = ast.literal_eval(x)\n x = x.strip('][').split(', ')\n x1 = [n.strip('\\'') for n in x]\n return (x1)", "def parsing(l):\r\n l_p = []\r\n for i in range(0,len(l),3):\r\n l_p.append(l[i:i+3])\r\n return l_p", "def _parse_list(string, dtype=int, delimiter=','):\n\n items = string.lower().strip().replace(' ', '').split(delimiter)\n\n if 'none' in items:\n items.pop(items.index('none'))\n contains_none = True\n else:\n contains_none = False\n\n\n if dtype == bool:\n items = [item == 'true' for item in items]\n else:\n items = [dtype(item) for item in items]\n\n if contains_none:\n items.append(None)\n\n return items", "def arg_parse_list(text, j):\n\n depth = 0\n loc2 = j\n arglist = []\n prev_start = j\n while 1:\n if text[loc2] == \"(\":\n if depth == 0:\n prev_start = loc2 + 1\n depth = depth + 1\n\n elif text[loc2] == \")\":\n depth = depth - 1\n if depth == 0:\n arglist.append(text[prev_start:loc2].strip())\n break\n\n elif text[loc2] == \",\":\n if depth == 1:\n arglist.append(text[prev_start:loc2].strip())\n prev_start = loc2 + 1\n elif text[loc2] == \"{\":\n depth = depth + 1\n elif text[loc2] == \"}\":\n depth = depth - 1\n loc2 = loc2 + 1\n return arglist", "def _list2str(self, data, delimiter=\",\", classify=lambda x: x):\n res = \"\"\n for i in range(len(data)):\n res += classify(data[i])\n if i != len(data) - 1:\n res += delimiter + \" \"\n return res", "def _fmt_list(self, string_list):\n return self._fmt_csv(string_list, list_braces=\"[]\")", "def parse(\n to_parse: ParsableValues,\n remove_duplicates: bool = True,\n read_files: bool = False,\n domains_to_addrs: bool = False,\n only_addresses: bool = False,\n addrs_to_domains: bool = False,\n only_hostnames: bool = False\n) -> List[str]:\n if isinstance(to_parse, bytes):\n to_parse = 
to_parse.decode('utf-8')\n\n if isinstance(to_parse, list):\n return normalize(to_parse)", "def _concatenated_list(s):\n m = LIST_CONCAT_P.match(s.strip())\n if not m:\n raise ValueError(s)\n maybe_list = _decode_flag_val(m.group(1))\n if isinstance(maybe_list, list):\n return maybe_list * int(m.group(2))\n return s", "def _parse_params(members_list):\n return [literal_eval(p.strip()) for p in members_list]", "def parse_comm_list(self, comm):\n edges = False\n if len(comm[0]) == 2:\n edges = True\n if self.string_permutation(comm[0], comm[1]):\n if edges:\n found = []\n found.append(comm[0])\n for temp in self.last_solved_pieces:\n if temp in self.edges_numbers:\n sticker = self.dict_stickers[temp]\n if not self.string_permutation_list(sticker, found):\n found.append(sticker)\n found.append(\" flip\")\n comm_new = found\n else:\n found = []\n found.append(comm[0])\n for temp in self.last_solved_pieces:\n if temp in self.corners_numbers:\n sticker = self.dict_stickers[temp]\n if not self.string_permutation_list(sticker, found):\n found.append(sticker)\n found.append(\" twist\")\n comm_new = found\n else:\n comm_new = comm\n return comm_new", "def _format_list(param_list: Iterable[Any]):\n fmt_list = []\n for item in param_list:\n if isinstance(item, str):\n fmt_list.append(f\"'{item}'\")\n else:\n fmt_list.append(f\"{item}\")\n return \",\".join(fmt_list)", "def _py3_safe(parsed_list):\n if len(parsed_list) < 2:\n return parsed_list\n else:\n new_list = [parsed_list[0]]\n nl_append = new_list.append\n for before, after in py23_zip(islice(parsed_list, 0, len(parsed_list)-1),\n islice(parsed_list, 1, None)):\n if isinstance(before, Number) and isinstance(after, Number):\n nl_append(\"\")\n nl_append(after)\n return tuple(new_list)", "def separate_list_input(input_: str) -> List[str]:\n no_commas = input_.replace(\",\", \" \")\n # Each string is naturally unicode, this causes problems with M2Crypto SANs\n # TODO: check if above is still true when M2Crypto is gone ^\n return [str(string) for string in no_commas.split()]", "def __parse_list(self) -> list:\r\n self.idx += 1\r\n l = []\r\n while self.data[self.idx: self.idx + 1] != b'e':\r\n l.append(self.__parse())\r\n self.idx += 1\r\n return l", "def parser(sent_list): #input: list of sentences", "def quote_list(the_list):\n return [\"'%s'\" % element for element in the_list]", "def parse_port_lists(portlists) -> List[PortList]:\n port_lists = []\n\n for port_list in portlists:\n items = {}\n\n if isinstance(port_list.items, collections.Iterable):\n splits = port_list.items.replace(\" \", \"\").strip().split(\"\\n\")\n splits = [x for x in splits if re.search(r'\\d+', x)] #removes comment only entries\n\n\n if len(splits) == 1 and \",\" in splits[0]:\n result = splits[0].split(\",\")\n splits = []\n for i in result:\n splits.append(i)\n\n\n for a in splits:\n splits2 = a.split(\"#\")\n\n if len(splits2) == 1:\n items[splits2[0]] = \"\"\n else:\n items[splits2[0]] = splits2[1]\n\n else:\n items = {}\n\n\n pl = PortList(port_list.ID, port_list.name, port_list.description, items, port_list.TBUID)\n port_lists.append(pl)\n\n\n return port_lists", "def parse_list(string, dtype):\n # l = string.replace('[', '').replace(']', '').replace(' ', '').split(',')\n s = string.replace(' ', '') # remove all spaces first\n if s[0] == '[': # it's not only a single item\n s = s[1:-1] # remove [ and ] from start and end only\n else: # it's just a single item\n return dtype(s)\n if s[0] == '[': # it's a list of lists\n splitted = s.split('],')\n for i in 
range(len(splitted)-1):\n splitted[i] += ']' # splitting removed the closing bracket from all but the last item\n l = list(map(lambda x: parse_list(x, dtype), splitted))\n else:\n splitted = s.split(',')\n l = list(map(dtype, splitted))\n return l", "def parse_list_str(setting_str):\n return re.split('\\s*,\\s*', setting_str)", "def strToStrList(x):\n if type(x)==str:\n return x[2:-2].split(\"', '\")", "def JoinList(LIST):\r\n if type(LIST) == list:\r\n out = ', '.join(LIST)\r\n elif type(LIST) == str:\r\n out = LIST\r\n return out", "def parse(self) -> List[List[Union[str,int]]]:\n return self.__create_list(cp(self.tokens))", "def list_to_perl_string(input_list):\n elems = []\n for v in input_list:\n t = type(v).__name__\n if t == 'str':\n elems.append(\"\\\"%s\\\"\" % escape_perl_string(v))\n elif t == 'int':\n elems.append(\"%d\" % v)\n elif t == 'float':\n elems.append(\"%f\" % v)\n elif t == 'list':\n elems.append(\"%s\" % list_to_perl_string(v))\n elif t == 'dict':\n elems.append(\"%s\" % dict_to_perl_string(v))\n else:\n raise Exception(\"Unsupported type \" + str(t))\n return \"[%s]\" % \", \".join(elems)", "def decode_list(self, tokens: list) -> str:\r\n return NotImplementedError", "def listify(item, delimiter=\",\"):\n if not item:\n return []\n if type(item) is str:\n item = item.split(delimiter)\n if type(item) is not list:\n raise TypeError(\"'listify' must take None, str, or list!\")\n return item", "def _parse_network_list(self, *cmd):\n if self._fail_network_list:\n raise processutils.ProcessExecutionError(exit_code=1)\n else:\n return NETWORK_LIST, ''", "def test_ordered_lists(self):\n\n list_str = '1. One'\n \n doc = parser.parse(list_str)\n self.assertEqual(len(doc.children()), 1)\n\n ol = doc.children()[0]\n self.assertTrue(isinstance(ol, parser.ListNode))\n\n self.assertEqual(str(doc), list_str)\n\n list_str = '- One\\n 1. OneOne\\n 2. 
OneTwo'\n\n doc = parser.parse(list_str)\n self.assertEqual(len(doc.children()), 1)\n\n ul = doc.children()[0]\n self.assertEqual(len(ul.children), 1)\n\n li = ul.children[0]\n ol = li.children[0]\n\n self.assertEqual(len(ol.children), 2)", "def list_process(field, item_list:List[str]):\n # if isinstance(item_list, list):\n if len(item_list) == 0:\n return {\n\n }\n saved_list = []\n\n for i in item_list:\n saved_list.append(f\"{i}\")\n return {\n field: \",\".join(saved_list)\n }", "def _split_input_list(str_list):\r\n\r\n new_list = re.split(r'[\\n\\r\\s,]', str_list)\r\n new_list = [s.strip() for s in new_list]\r\n new_list = [s for s in new_list if s != '']\r\n\r\n return new_list", "def unsplitter(lst):\n unsplit = []\n for index, t in enumerate(lst):\n if index == 0 or index == len(lst) - 1:\n unsplit.append(t)\n continue\n if \"'\" in t and not t.endswith(\"'\"):\n rejoined = ''.join([lst[index - 1], t])\n unsplit.append(rejoined)\n else:\n if not \"'\" in lst[index + 1]:\n unsplit.append(t)\n return unsplit", "def sqllist(lst):\n if isinstance(lst, basestring): \n return lst\n else:\n return ', '.join(lst)", "def list_str(lis):\r\n as_str = \"\"\r\n for item in lis:\r\n as_str += \" \" + str(item) + \",\"\r\n return as_str[:-1]", "def parsePresetStrings(ps_list):\n\n return [parsePreset(ps) for ps in ps_list]", "def _convertListToString(self, list_of_objects):\n return (';').join(list_of_objects)", "def _format_list_for_query(input_list):\n return (\n \", \".join(input_list).replace(\" \", \"\").replace(\"'\", \"\").replace(\",\", \"%2C\")\n )", "def format_list(my_list):\r\n\treturn \", \".join(my_list[::2]) + (\" and \" + my_list[-1])", "def parse(input):\n return [l.strip() for l in input.splitlines() if l.strip()]", "def to_strs(items) -> List[str]:\n result = []\n for item in items:\n if isinstance(item, str):\n if len(item) > 0:\n if not ((item[0] == '\"' and item[-1] == '\"') or (item[0] == \"'\" and item[-1] == \"'\")):\n result.append('\"' + item + '\"')\n else:\n result.append(item)\n else:\n result.append(item)\n else:\n result.append(str(item))\n return result", "def parse(self):\n\n # if empty, return False, instead of crashing\n if not self.content:\n return False\n command = ''.join(self.content).split(' ')\n\n # return first element, then the rest\n return [command[0], (command[1:])]", "def parse1DList(self,string):\r\n string = string.replace(\"[\",\"\")\r\n string = string.replace(\"]\",\"\")\r\n string = string.split(\",\")\r\n for i in xrange(len(string)):\r\n string[i] = float(string[i])\r\n string = list(string)\r\n return string", "def get_list_of_str2(self):\n pass", "def conf_load_par_list(par_def):\n par_def = par_def[1:-1].split(',')\n par_list = list()\n for p in par_def:\n par_list.append(p.strip())\n return par_list", "def tokenlist(sep, item):\n return item + ZeroOrMore(sep + item) + Optional(sep)", "def __parse_string_for_delimiter__(self, data):\n parsed = []\n for row in data:\n row = self.__remove_break_line__(row)\n row = self.__split_for_delimiter__(row)\n parsed.append(row)\n return parsed", "def pure_list(comma_list):\n pure_items = []\n for comma_item in comma_list:\n for item in comma_item.split(','):\n pure_items.append(item)\n return pure_items", "def stringInputToList(x):\n return list(filter(None, [y.strip() for y in x.split(',')]))", "def parse_list(entry, separator):\n r = [x for x in entry.split(separator)] if entry else None\n\n # for lists with only a single element, return just the element\n if isinstance(r, list) and len(r) == 
1:\n return r[0]\n else:\n return r", "def clean(item: list) -> list:\n item = [x.replace(\"'\", \"\")\n .replace('\"', '')\n .replace('[', '')\n .replace(']', '')\n .split(',') for x in item]\n\n return item", "def fromList(cls, list):\n obj = CIGAR()\n if list == ['*']:\n obj._tokens = '*'\n else:\n if not all( type(e) == tuple \n and len(e) == 2 \n and type(e[0]) == int \n and e[0] >= 1 \n and type(e[1]) == str \n and len(e[1]) == 1 \n and e[1] in 'MIDNSHPX=' for e in list):\n raise ValueError('Invalid list to form CIGAR string')\n\n obj._tokens = list\n obj._changed = True\n obj.compact()\n return obj", "def parse(arr_str):\n return arr_str.rstrip().replace(' ', '').split(',')[:-1]", "def string_list(out, name, items):\n print(f\"const char* const {name}[] = {{\", file=out)\n for item in items:\n print(f\" \\\"{item}\\\",\", file=out)\n print(\" nullptr,\", file=out)\n print(\"};\", file=out)\n print(\"\", file=out)\n pass", "def slist(body):\n return SList(body.split(\"\\n\"))", "def parse_list_header(value):\n result = []\n for item in urllib2.parse_http_list(value):\n if item[:1] == item[-1:] == '\"':\n item = unquote_header_value(item[1:-1])\n result.append(item)\n return result", "def toStrFromList(values, precision, delim=\",\"):\n\tsValues = list(map(lambda v: toStr(v, precision), values))\n\treturn delim.join(sValues)", "def mk_sql_list(ls):\n res = \"(\" + ' '.join([str(elem) for elem in intersperse(\",\", ls)]) + \")\"\n return res", "def format_list(my_list):\n \n new_list = my_list[2: -1]\n new_list = new_list[: : 2]\n new_list = [my_list[0]] + new_list\n new_list = new_list + [\"and \" + my_list[-1]]\n \n string = ', '.join(new_list)\n print(string)", "def parse_command_list(config_str):\n return [command for command in config_str.splitlines() if command]", "def parseGoalList(s):\n return map(Parser._convertGoal, goalListNT.parseString(s).asList())", "def proc_reg_list(txt):\n x = QTRegEx.OP_REG_LIST.findall(txt)\n y = x[0].strip(';')\n y = y.strip()\n return y.split(',')", "def list_string(join_list):\n joined_list = '[{}]'.format(join_list, join_list)\n return joined_list", "def str_transform_list(L):\n return [str(x) for x in L]", "def build_list(self, l):\n comma = self.art_type([self.string_type(', ')],\n baseline=0,\n breakpoints=[1])\n repr_elems = self.concatenate(l, comma)\n return self.build_container(\n repr_elems, self.left_square_bracket, self.right_square_bracket)", "def listToStringFormat(self, list) ->str:\n string = ''\n for element in list:\n string = string + str(element) + \"\\n\"\n return string", "def format(lis):\n if lis:\n return \";\".join(\",\".join(str(i) for i in n) for n in lis)\n else:\n return \"NULL\"", "def _listify_input(self, input_string):\n stripped_string = re.sub(r'\\s+', '', input_string.strip())\n split_list = stripped_string.split(\",\")\n return [(x[0], int(x[1::])) for x in split_list]", "def parse_list(list_str):\n return list(map(int, re.findall(r'\\d+', list_str)))", "def list_stringify(inlist):\n outlist = []\n for item in inlist:\n if not isinstance(item, list):\n if not isinstance(item, str):\n thisitem = str(item)\n else:\n thisitem = item\n else:\n thisitem = list_stringify(item)\n outlist.append(thisitem)\n return outlist", "def parse_list_output(output):\n lines = output.splitlines()\n keys = filter(None, lines[1].split('|'))\n keys = [x.lower().strip() for x in keys]\n r = []\n for line in lines[3:-1]:\n if len(line.split()) <= 1:\n continue\n values = filter(None, line.split('|'))\n values = [x.strip() for x in 
values]\n assert len(keys) == len(values)\n record = dict(zip(keys, values))\n r.append(record)\n return r", "def list_stringify(inlist):\n outlist = []\n for item in inlist:\n if not isinstance(item, (tuple, list)):\n if not isinstance(item, basestring):\n item = str(item)\n else:\n item = list_stringify(item)\n outlist.append(item)\n return outlist", "def csvwrite(inlist, stringify=False):\n out_list = []\n for entry in inlist:\n if stringify:\n new_entry = []\n for val in entry:\n if not isinstance(val, basestring):\n val = str(val)\n new_entry.append(val)\n entry = new_entry\n this_line = ', '.join([elem_quote(val) for val in entry])\n out_list.append(this_line)\n return out_list", "def soar_trimlist(org_list):\n if not isinstance(org_list, list):\n return org_list\n return [element.strip() for element in org_list]", "def format_string_to_list(self, avi_string):\n\n repls = ('[', ''), (']', ''), (\"'\", \"\")\n avi_string = reduce(lambda a, kv: a.replace(*kv), repls, avi_string)\n return avi_string.split(',')", "def _parse_list_of_lists(string, delimiter_elements=',', delimiter_lists=':', delimiter_pipelines=';', dtype=float):\n new_list = []\n for sub_list in string.strip().replace(' ', '').split(delimiter_pipelines):\n if delimiter_lists in sub_list:\n new_list.append([_parse_list(item, dtype=dtype, delimiter=delimiter_elements) for item in sub_list.split(delimiter_lists)])\n else:\n new_list.append(_parse_list(sub_list, dtype=dtype, delimiter=delimiter_elements))\n return new_list", "def readStringList( Text, ItemSeparator = ';' ):\n ValuesList = []\n try:\n if Text.find(ItemSeparator) >= 0: \n ValuesList = Text.strip().split(ItemSeparator)\n except:\n pass\n return ValuesList", "def human_list(lst, connector='and'):\n # we don't want to listify non iterables\n if not getattr(lst, '__iter__', False):\n return lst\n else:\n s = ''\n max_idx = len(lst) - 1\n for i, item in enumerate(lst):\n if i == 0:\n t = '%s'\n elif i == max_idx and max_idx > 1:\n t = ', ' + connector + ' %s'\n elif i == max_idx and max_idx == 1:\n t = ' ' + connector + ' %s'\n else:\n t = ', %s'\n s += t % filter.conditional_escape(item)\n return mark_safe(s)", "def strToList(x):\n if type(x)==str:\n return x[2:-2].split(\"', '\")", "def list_to_string(inputlist):\n outstring = \"\"\n numusers = len(inputlist)\n if numusers == 1: # foo\n outstring += inputlist[0]\n if numusers == 2: # foo and bar\n outstring += (inputlist[0] + \" and \" + inputlist[1])\n if numusers >= 3: # foo, bar and baz\n for x in range(numusers-2):\n outstring += inputlist[x] + \", \"\n outstring += (inputlist[-2] + \" and \" + inputlist[-1])\n return outstring", "def list_to_str(a_list):\n new_str = \"\"\n for item in a_list:\n item = str(item).replace(\"\\'\", \"\\'\\'\")\n if new_str:\n new_str += \", '\" + item + \"'\"\n else:\n new_str = \"'\" + item + \"'\"\n return new_str", "def listparse(inline, recursive = 1, comment = 1, retain = 0, lpstack = None, **keywargs):\n if keywargs.has_key('escapechar'):\n escapechar = keywargs['escapechar'] # either True or False\n else:\n escapechar = True\n outlist = []\n inline = inline.strip()\n if inline[0] != '[':\n return None\n inline = inline[1:].lstrip()\n found_end = 0\n thiselement = None\n escape = 0\n while inline:\n if thiselement == None: # start of the element\n output = unquote(inline, 0, retain, escapechar=escapechar) # partquote mode, retain quotes.......\n if output == None:\n return None\n if output != -1: # element is quoted\n thiselement, inline = output\n inline = 
inline.lstrip()\n if not inline:\n return None\n if inline[0] not in [',', ']']: # only two valid ways to terminate an element\n return None\n continue\n \n thischar = inline[0]\n inline = inline[1:]\n if escape: # the current character is escaped... whatever it may be\n thiselement =__add(thiselement, thischar)\n escape = 0\n continue\n elif thischar == '\\\\' and escapechar:\n escape = 1\n# thiselement = __add(thiselement, thischar) # commenting this out means we no longer retain the initial '\\' if quoting is on\n continue\n if recursive and not thiselement and thischar == '[':\n output = listparse('[' + inline, True, comment, retain, True, escapechar=escapechar) # we have found a list element, herewith lies recursion...\n if not output:\n return None # which is badly formed\n thiselement, inline = output\n inline = inline.lstrip()\n if not inline:\n return None\n if inline[0] not in [',', ']']: # only two valid ways to terminate an element\n return None\n continue\n if thischar == ',': # element terminated\n outlist.append(thiselement)\n thiselement = None\n inline = inline.lstrip()\n continue\n if thischar == ']':\n if thiselement != None: # trap empty lists\n outlist.append(thiselement)\n found_end = 1\n if lpstack:\n return outlist, inline\n break\n thiselement = __add(thiselement, thischar)\n if not found_end:\n return None\n inline = inline.strip()\n if inline and not comment:\n return None\n elif not comment:\n return outlist\n if inline and inline[0] not in ['#',';']:\n return None\n return outlist, inline" ]
[ "0.6197948", "0.6151549", "0.6102005", "0.606939", "0.60056645", "0.596137", "0.5894534", "0.58915085", "0.58653396", "0.58636934", "0.58515674", "0.5836412", "0.58261746", "0.5818068", "0.5795482", "0.5790831", "0.5783101", "0.5743863", "0.5742642", "0.5727883", "0.5720543", "0.5697288", "0.5682592", "0.5679404", "0.567117", "0.5660241", "0.5650753", "0.5647915", "0.5632484", "0.56312793", "0.56236905", "0.5606574", "0.5605573", "0.5604857", "0.55845034", "0.5575105", "0.5533325", "0.5528703", "0.5521099", "0.551963", "0.5517265", "0.55053043", "0.5473824", "0.5453545", "0.54505366", "0.5441995", "0.544066", "0.54217464", "0.54212093", "0.5412822", "0.5406793", "0.5405099", "0.54047436", "0.54044706", "0.5396811", "0.538567", "0.537628", "0.5373091", "0.5368341", "0.53598", "0.5357429", "0.53426796", "0.5321684", "0.53214955", "0.5319223", "0.5318519", "0.531754", "0.5304549", "0.5293434", "0.5280206", "0.5279134", "0.5270611", "0.5268511", "0.52633816", "0.5261482", "0.5259153", "0.5258007", "0.5253171", "0.5232596", "0.5227778", "0.52236277", "0.5214649", "0.5214204", "0.5212953", "0.521049", "0.52097535", "0.5205053", "0.5201498", "0.52002937", "0.5194008", "0.51871604", "0.5180228", "0.51764154", "0.51690334", "0.5168952", "0.5157799", "0.5157298", "0.5155296", "0.51494867", "0.51380306" ]
0.68961257
0
Parse openssl-style /-separated list to dict.
def parse_dn(dnstr):
    res = []
    for part in loop_escaped(dnstr, '/'):
        part = part.strip()
        if not part:
            continue
        if '=' not in part:
            raise InvalidCertificate("Need k=v in Name string")
        k, v = part.split('=', 1)
        res.append((k.strip(), v.strip()))
    return res
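Despite the query's wording, the function returns an ordered list of (key, value) pairs rather than a dict proper — DN components may legitimately repeat, so a flat dict would lose entries. A sketch of the expected output:

print(parse_dn('/CN=example.com/O=Example Corp/OU=Dev'))
# [('CN', 'example.com'), ('O', 'Example Corp'), ('OU', 'Dev')]

# A backslash-escaped slash stays inside the value:
print(parse_dn(r'/CN=foo\/bar'))
# [('CN', 'foo/bar')]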
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _to_dict(self, data_list):\n data_dict = dict(pair.split('=') for pair in data_list)\n return data_dict", "def parse(line):\n return dict([pair.split(':') for pair in line.split()])", "def _convertToDict(self, parsed):\r\n d = dict()\r\n itp = iter(parsed)\r\n for pp in itp:\r\n if not isinstance(pp, list):\r\n if pp.find(';') == -1:\r\n # if not a list and doesn't include ';' it's a key and\r\n # next item is the value\r\n d[pp.strip()] = self._convertToDict(next(itp))\r\n else:\r\n s = pp.split(';')\r\n if not pp.endswith(';'):\r\n # last item is a key and next item is the value\r\n d[s[-1].strip()] = self._convertToDict(next(itp))\r\n s = s[:-1]\r\n for ppp in s:\r\n ss = ppp.split()\r\n if ss:\r\n d[ss[0].strip()] = ' '.join(ss[1:]).strip()\r\n return d", "def arglist_parse_to_dict(arg_l):\n\n prop_d = {}\n for prop in arg_l:\n if len(prop) == 2:\n prop_l = prop\n elif ':' in prop:\n prop_l = prop.split(':')\n elif '=' in prop:\n prop_l = prop.split('=')\n else:\n exit( \"==> ERROR: invalid config. Use '=' or ':'.\" )\n if not len(prop_l) == 2:\n exit( \"==> ERROR: invalid config. Use one '=' per setting.\" )\n prop_d[prop_l[0]] = prop_l[1]\n return prop_d", "def parse_entry(lines):\n entry = {}\n for line in lines:\n line = line.replace('\\n', '').replace('\\r', '')\n if ':: ' in line:\n (key, value) = line.split(':: ')\n value = base64.b64decode(value).decode('utf-8')\n elif ': ' in line:\n (key, value) = line.split(': ')\n else:\n continue\n if key not in entry:\n entry[key] = []\n entry[key].append(value)\n return entry", "def string_to_keypair(self, data): \n return keypair_lst", "def parsePemList(self, s):\r\n x509List = []\r\n bList = dePemList(s, \"CERTIFICATE\")\r\n for b in bList:\r\n x509 = X509()\r\n x509.parseBinary(b)\r\n x509List.append(x509)\r\n self.x509List = x509List", "def list_to_dict(sep, lst):\n mydict = {}\n for ele in range(len(lst)):\n pos = lst[ele].find(':')\n mydict[lst[ele][:pos]] = lst[ele][pos+1:]\n return mydict", "def makeDict(self, s):\n out = {}\n entries = s.split(self.dataDelimiterEntry)\n for e in entries:\n if e == \"\":\n continue\n c = e.split(self.dataDelimiterKey)\n out[c[0]] = c[1]\n return out", "def parseconfig_se(cfile):\n cdict = {}\n f = open(cfile,'r')\n lines = f.readlines()\n for l in lines:\n a = string.split(l)\n if len(a) > 0:\n if a[0][0] != '#':\n maxi = len(a)\n for i in range(1,len(a)):\n if a[i][0] == '#':\n maxi = i\n break\n # Turn comma-separated lists into python lists\n entry = []\n for e in a[1:maxi]:\n if string.find(e,','):\n entry = entry + string.split(e,',')\n else:\n entry = entry + [e]\n cdict[a[0]] = entry\n return cdict", "def _parse_tags(tags: str):\n return dict(item.split(\":\") for item in shlex.split(tags)) # type: ignore", "def _convert_tags_to_dict(text_list_tags):\n return OrderedDict([re.findall(r\"\"\"\\s*_(\\w+)\\s+(.+?)\\s*$\"\"\", row)[0] for row in text_list_tags])", "def _parse_opensslconf(self):\n# print \"parse_opensslconf\"\n _log.debug(\"__init__::parse_opensslconf\")\n if not self.config.read(self.configfile):\n# print \"could not parse config file\"\n # Empty openssl.conf file or could not successfully parse the file.\n self.new_opensslconf()\n configuration = {}\n for section in self.__class__.DEFAULT.keys():\n for option in self.__class__.DEFAULT[section].keys():\n raw = self.config.get(section, option)\n value = raw.split(\"#\")[0].strip() # Remove comments\n\n if \"$\" in value: # Manage OpenSSL variables\n variable = \"\".join(value.split(\"$\")[1:])\n variable = 
variable.split(\"/\")[0]\n if variable == \"calvindir\":\n varvalue = _conf.install_location()\n else:\n varvalue = self.config.get(section, variable).split(\"#\")[0].strip()\n if \"$calvindir\" in varvalue:\n varvalue = _conf.install_location() + \"/\" + \"/\".join(varvalue.split(\"/\")[1:])\n path = \"/\" + \"/\".join(value.split(\"/\")[1:])\n value = varvalue + path\n try:\n configuration[section].update({option: value})\n except KeyError:\n configuration[section] = {} # New section\n configuration[section].update({option: value})\n return configuration", "def potcar_str2dict(potcar_list: Optional[str]) -> dict:\n if potcar_list is None:\n return {}\n elif isinstance(potcar_list, str):\n potcar_list = potcar_list.split()\\\n\n d = {}\n for p in potcar_list:\n element = p.split(\"_\")[0]\n d[element] = p\n return d", "def create_dict_from_list(parse_list, key_ind, *val_inds):\n parse_dict=defaultdict(list)\n for string in parse_list:\n if not val_inds:\n parse_dict[string[key_ind]]=string\n else:\n parse_dict[string[key_ind]]=[string[i] for i in range(len(string)) if i in val_inds]\n return(parse_dict)", "def parse_list_output(output):\n lines = output.splitlines()\n keys = filter(None, lines[1].split('|'))\n keys = [x.lower().strip() for x in keys]\n r = []\n for line in lines[3:-1]:\n if len(line.split()) <= 1:\n continue\n values = filter(None, line.split('|'))\n values = [x.strip() for x in values]\n assert len(keys) == len(values)\n record = dict(zip(keys, values))\n r.append(record)\n return r", "def _raw_misc_to_dict(raw):\n ret = {}\n for elem in raw:\n key, _, val = elem.partition(',')\n key = key.lstrip(\"(\").strip()\n val = val[:-1].strip()\n ret[key] = val\n return ret", "def parse_lines_to_dict(lines):\n res = {k: v.strip() for k, v in (m.split(':', 1) for m in lines)}\n return res", "def _parse_parameters(parameters):\n\n if not re.match(r'^(\\w+)=\"([^=]+)\"(\\s{1}(\\w+)=\"([^=]+)\")*$', parameters):\n raise ValueError\n\n # first we add tokens that separate key/value pairs.\n # in case of key='ss sss ss', we skip tokenizing when we se the first single quote\n # and resume when we see the second\n replace_space = True\n tokenized = \"\"\n for c in parameters:\n if c == '\\\"':\n replace_space = not replace_space\n elif c == ' ' and replace_space:\n tokenized += \"$$\"\n else:\n tokenized += c\n\n # now get the tokens\n tokens = tokenized.split('$$')\n result = {}\n for token in tokens:\n # separate key/values\n key_value = token.split(\"=\")\n result[key_value[0]] = key_value[1]\n return result", "def preprocess_spends_list(cls, spends_list):\n return dict(spends_list)", "def _parse(file_contents):\n\n if file_contents is None or file_contents == '':\n return {}\n\n result = {}\n\n for line in file_contents.splitlines():\n # Full line comment\n if line[:1] == '#':\n continue\n\n parts = line.split('=', 1)\n\n # Not a full key-value pair.\n if len(parts) < 2:\n continue\n\n result[parts[0].strip()] = parts[1].strip()\n\n return result", "def _parse_long(value):\n dict_value = {}\n\n for line in value.split('\\n'):\n if ':' in line:\n k, v = line.split(':', 1)\n dict_value[k.strip()] = v.strip()\n\n return dict_value", "def _list2pair(s_list):\n return s_list.pair", "def dePemList(s, name):\r\n bList = []\r\n prefix = \"-----BEGIN %s-----\" % name\r\n postfix = \"-----END %s-----\" % name\r\n while 1:\r\n start = s.find(prefix)\r\n if start == -1:\r\n return bList\r\n end = s.find(postfix, start+len(prefix))\r\n if end == -1:\r\n raise SyntaxError(\"Missing PEM 
postfix\")\r\n s2 = s[start+len(prefix) : end]\r\n retBytes = a2b_base64(s2) # May raise SyntaxError\r\n bList.append(retBytes)\r\n s = s[end+len(postfix) : ]", "def _parse_parameter_list(\n parameter_list: abc.Iterable[str],\n normalize_parameter_names: bool = False,\n normalize_parameter_values: bool = True,\n strip_interior_whitespace: bool = False) -> list[tuple[str, str]]:\n parameters = []\n for param in parameter_list:\n param = param.strip()\n if param:\n name, value = param.split('=')\n if strip_interior_whitespace:\n name, value = name.strip(), value.strip()\n if normalize_parameter_names:\n name = name.lower()\n if normalize_parameter_values:\n value = value.lower()\n parameters.append((name, _dequote(value.strip())))\n return parameters", "def list_to_dict(list: list, keys: list):\n dictionary = dict()\n for key in keys:\n try:\n index = list.index(f'{key}:')\n dictionary[list[index].strip(':')] = list[index + 1]\n except ValueError:\n print(f'{key} not found!')\n return dictionary", "def _parse_config_args(args):\r\n config_dict = dict()\r\n for config_str in args:\r\n try:\r\n components = config_str.split('=')\r\n if len(components) >= 2:\r\n config_dict[components[0]] = \"=\".join(components[1:])\r\n\r\n except:\r\n print \"Warning: could not interpret config value '{0}'\".format(config_str)\r\n pass\r\n\r\n return config_dict", "def _parse(self, inval):\n regex = re.compile(r'^os\\.environ\\[(.*)\\]$')\n for val in inval:\n if val is None:\n continue\n # split on \\n\n cmd = val.split('\\n')\n for v2 in cmd:\n if not v2:\n continue\n dict_call, pth = v2.split(' = ')\n m = re.match(regex, dict_call)\n if m:\n key = m.groups()[0]\n self.env[key] = pth", "def split_config(s):\n x = re.split(r\";\", s)\n d = {k: v for (k, v) in [i.split(\"=\") for i in x]}\n return d", "def parseConfStr(confStr):\n pairList = []\n specs = confStr.split(';')\n for spec in specs:\n if not spec:\n continue\n spec = spec.strip()\n splits = spec.split(',')\n splits = [ss.strip(\"()\") for ss in splits]\n splits = tuple(splits)\n pairList.append(splits)\n return pairList", "def _convert_param_list_to_dict(param_list: list, parameters_dict: dict) -> dict:\n for param in param_list:\n param_array: list = param.split(\"=\")\n key: str = param_array[0]\n value: str = None\n if len(param_array) > 1:\n value = param_array[1]\n parameters_dict[key] = value\n return parameters_dict", "def _parse_line(self):\n # check if line contains a rule or not\n stripped = self._line.strip()\n if not stripped or stripped.startswith(\"#\"):\n return None\n\n # strip out double quotes from values, and simplify equals strings\n simplified = self._line.replace(\"==\", \"=\").replace('\"', '')\n\n # return a dictionary formed from the key=value pairs found in line\n return dict(f.strip().split(\"=\", 1) for f in simplified.split(\",\"))", "def rawtodictonary(rawstring):\n\n if is_python3():\n rawstring = bytes(rawstring).decode('utf-8') \n \n raw = rawstring.split(NULL)[:-2]\n\n data = {}\n for i in range(0,len(raw) - 1,2):\n\n key,val = raw[i], raw[i+1]\n keyel = key.split(\"\\x1c\")\n\n if len(keyel) == 1:\n if key in data:\n data[key][None] = val\n else:\n data[key]=val\n else:\n if keyel[0] in data and not isinstance(data[keyel[0]], dict):\n data[keyel[0]]={ None: data[keyel[0]] }\n\n try:\n data[keyel[0]][keyel[1]] = val\n except TypeError:\n data[keyel[0]] = {keyel[1] : val}\n except KeyError:\n data[keyel[0]] = {keyel[1] : val}\n\n return data", "def parse_list(value: str) -> list[str]:\n segments = 
_QUOTED_SEGMENT_RE.findall(value)\n for segment in segments:\n left, match, right = value.partition(segment)\n value = ''.join([left, match.replace(',', '\\000'), right])\n return [_dequote(x.strip()).replace('\\000', ',') for x in value.split(',')]", "def splitkv(s):\n a=re.split('(\\w*)\\s*=\\s*\"([^=\"]*)\"\\s*', s)\n a=[ t for t in a if t!='']\n return a", "def extract_key_value_pairs(string, joiner='=', separator=','):\n return dict([x.strip() for x in s.split(joiner, 1)] for s in string.split(separator))", "def parse_config(self, data):\n match = re.search(\"-----BEGIN RSA PRIVATE KEY-----.*\" + \\\n \"-----END RSA PRIVATE KEY-----\\n\", data, \n re.MULTILINE | re.DOTALL)\n if not match:\n raise Exception(\"Private key not found\")\n key = match.group()\n\n match = re.search(\"-----BEGIN CERTIFICATE-----.*\" + \\\n \"-----END CERTIFICATE-----\\n\", data, \n re.MULTILINE | re.DOTALL)\n if not match:\n raise Exception(\"Certificate not found\")\n cert = match.group()\n # config also contains allowed, dns, but we don't use that for GCMU\n return (cert, key)", "async def parse(self, raw: str) -> dict:", "def headers_raw_to_dict(headers_raw):\n\n if headers_raw is None:\n return None\n headers = headers_raw.splitlines()\n headers_tuples = [header.split(':', 1) for header in headers]\n\n result_dict = {}\n for header_item in headers_tuples:\n if not len(header_item) == 2:\n continue\n\n item_key = header_item[0].strip()\n item_value = header_item[1].strip()\n result_dict[item_key] = item_value\n\n return result_dict", "def parse_string_list(data):\n txt = data.decode()\n x = ast.literal_eval(txt)\n return x", "def decodemeta(data):\n d = {}\n for l in data.split('\\0'):\n if l:\n key, value = l.split(':')\n d[key] = value\n return d", "def _find_keypair_list(xml_str):\r\n match1 = re.search('keyPairId=\\\"(\\S*)\\\"', xml_str)\r\n match2 = re.search('keyPairName=\\\"(\\S*)\\\"', xml_str)\r\n keypair_list = [\r\n {\r\n \"keyId\": match1.group(1),\r\n \"keyName\": match2.group(1)\r\n }\r\n ]\r\n return keypair_list", "def parse(pem_str):\n return [_PEM_TO_CLASS[match.group(1)](match.group(0))\n for match in _PEM_RE.finditer(pem_str)]", "def openssl_config_strip(data):\n result = []\n for line in data.split(\"\\n\"):\n work_line = line.strip()\n work_line = re.sub(\"([^#]*)#.*?$\", \"\\\\1\", work_line)\n if len(work_line) > 0:\n result.append(work_line)\n return \"\\n\".join(result)", "def _parse_goauth_token(self, token):\n data = {}\n for d in token.split('|'):\n v = d.split('=')\n data[v[0]] = v[1]\n return data", "def __parse_options_dict(options_str):\n # type: (str) -> Dict[str, str]\n opts = options_str.split('&') # type: List[str]\n res = {} # Type: Dict\n\n for opt in opts:\n key, value = opt.split('=') # type: List[str, str]\n res[key] = value # type: str\n\n return res", "def _process_cookiestr(cookieStr):\n cookie_list = []\n # parses cookie str if a cookie has been set\n for cookie in cookieStr.split('\\n'):\n c = {}\n for cook in cookie.split(';'):\n token = cook.split('=', 1)\n if len(token) < 2: \n # usually this is just a flag e.g HTTPOnly, HTTPSOnly\n continue\n c[token[0]] = token[1]\n cookie_list.append(c)\n return cookie_list", "def parse_input(input_file):\n lines = open(input_file).read().split('\\n\\n')\n rules = {}\n for line in lines[0].splitlines():\n key, values = line.split(':')\n\n items = values.replace(' or ', ' ').split()\n rules[key] = items\n my_ticket = lines[1].splitlines()[1]\n tickets = lines[2].splitlines()[1:]\n return rules, my_ticket, tickets", "def 
env_lines_to_dict(self, env_lines):\n env_dict = {}\n for env_line in env_lines:\n split_env_line = shlex.split(env_line)\n if split_env_line[0] == \"export\":\n split_env_line = split_env_line[1:]\n for item in split_env_line:\n if \"=\" in item:\n k, v = item.split(\"=\", 1)\n env_dict[k] = v\n return env_dict", "def _env(env):\n return dict((key.strip(), val)\n for line in env.strip().splitlines()\n for key, _, val in [line.partition('=')])", "def parse_file(input_lst):\n word_dct = {}\n for line in input_lst:\n raw_output = line.split() # these are lists of strings\n for str_ in raw_output: # strings\n str_ = str_.lower()\n str_ = str_.replace(\"-\", \" \")\n str_ = str_.replace(\"?\", \"\")\n str_ = str_.replace(\"!\", \"\")\n str_ = str_.replace(\",\", \"\")\n str_ = str_.replace(\"\\'\", \"\")\n str_ = str_.replace('\\\"', \"\")\n str_ = str_.replace(\".\", \"\")\n if str_ not in word_dct:\n word_dct[str_] = 1\n else:\n word_dct[str_] += 1\n return word_dct", "def cgi_parse(qs):\n d = {}\n for key, value in cgi.parse_qsl(qs, 1):\n if d.has_key(key):\n if isinstance(d[key], list):\n d[key].append(value)\n else:\n d[key] = [d[key], value]\n else:\n d[key] = value\n return d", "def parse_entry(tr):\n\ttr = tr.replace('\"', '')\n\ttrl = tr.split(\"; \")\n\ttrdict = OrderedDict()\n\n\tfor j in trl:\n\t\tk = j.split(\" \")\n\n\t\tif k[0] in trdict:\n# print \"%s already in dict\" % (k[0])\n\t\t\ttrdict[k[0]].append(k[1])\n\t\telse: \n\t\t\ttrdict[k[0]]=[k[1]]\n\treturn trdict", "def makeGcauCfgDictFromAgc(lineList): \r\n diction = {}\r\n withinCfgData = False\r\n for eachString in lineList:\r\n if re.match(RE_COMPILED_CFG_START, eachString):\r\n withinCfgData = True\r\n elif re.match(RE_COMPILED_CFG_END, eachString):\r\n withinCfgData = False\r\n elif withinCfgData:\r\n p = re.match(RE_COMPILED_CFG_ITEM, eachString)\r\n if p:\r\n obj = p.groups()[0]\r\n attr = p.groups()[1]\r\n val = p.groups()[2]\r\n if obj not in diction:\r\n diction[obj] = {}\r\n diction[obj][attr] = val\r\n return diction", "def _reformat_load_versions(ctx, param, value) -> Dict[str, str]:\n load_version_separator = \":\"\n load_versions_dict = {}\n\n for load_version in value:\n load_version_list = load_version.split(load_version_separator, 1)\n if len(load_version_list) != 2:\n raise ValueError(\n \"Expected the form of `load_version` to be \"\n \"`dataset_name:YYYY-MM-DDThh.mm.ss.sssZ`,\"\n \"found {} instead\".format(load_version)\n )\n load_versions_dict[load_version_list[0]] = load_version_list[1]\n\n return load_versions_dict", "def parse(data, raw=False, quiet=False):\n jc.utils.compatibility(__name__, info.compatible, quiet)\n jc.utils.input_type_check(data)\n\n raw_output = {}\n\n if jc.utils.has_data(data):\n data = data.splitlines()\n\n # linux uses = and bsd uses :\n if ' = ' in data[0]:\n delim = ' = '\n else:\n delim = ': '\n\n for line in data:\n linedata = line.split(delim, maxsplit=1)\n\n # bsd adds values to newlines, which need to be fixed up with this try/except block\n try:\n key = linedata[0]\n value = linedata[1]\n\n # syctl -a repeats some keys on linux. Append values from repeating keys\n # to the previous key value\n if key in raw_output:\n existing_value = raw_output[key]\n raw_output[key] = existing_value + '\\n' + value\n continue\n\n # fix for weird multiline output in bsd\n # if the key looks strange (has spaces or no dots) then it's probably a value field\n # on a separate line. in this case, just append it to the previous key in the dictionary.\n if '.' 
not in key or ' ' in key:\n previous_key = [*raw_output.keys()][-1]\n raw_output[previous_key] = raw_output[previous_key] + '\\n' + line\n continue\n\n # if the key looks normal then just add to the dictionary as normal\n else:\n raw_output[key] = value\n continue\n\n # if there is an IndexError exception, then there was no delimiter in the line.\n # In this case just append the data line as a value to the previous key.\n except IndexError:\n prior_key = [*raw_output.keys()][-1]\n raw_output[prior_key] = raw_output[prior_key] + '\\n' + line\n continue\n\n if raw:\n return raw_output\n else:\n return _process(raw_output)", "def parse_info(s:str) -> dict:\n d = {}\n d[\"SVTYPE\"] = re.search(r'(?<=SVTYPE=)\\w+',s).group(0)\n d[\"SUPPORT\"] = re.search(r'(?<=SUPPORT=)\\d+',s).group(0)\n if d[\"SVTYPE\"] in [\"BND\"]:\n return d\n d[\"END\"] = re.search(r'(?<=END=)\\d+',s).group(0)\n if d[\"SVTYPE\"] in [\"INV\"]:\n return d\n d[\"SVLEN\"] = re.search(r'(?<=SVLEN=)(.*?)(?=;)',s).group(0)\n d[\"READS\"] = re.search(r'(?<=READS=)(.*?)(?=$)',s).group(0).split(\",\")\n if d[\"SVTYPE\"] == \"INS\":\n d[\"SEQS\"] = re.search(r'(?<=SEQS=)(.*?)(?=;)',s).group(0).split(\",\")\n return d", "def parse_list_str(setting_str):\n return re.split('\\s*,\\s*', setting_str)", "def _parse_headers(headers):\n try:\n return dict(header.split(\":\") for header in headers)\n except:\n raise ValueError(\"Invalid headers %s\" % headers)", "def parse(self, s):\r\n\r\n bytes = dePem(s, \"CERTIFICATE\")\r\n self.parseBinary(bytes)\r\n return self", "def parse_pairs(self):\n pass", "def _convert_to_dict(r):\n if not r:\n return r\n else:\n return dict(token=r[0], code=r[2], value=r[1], address='-')", "def parse_list(constant_list):\n\n values = dict()\n descriptions = dict()\n for (key, value, desc) in constant_list:\n values[key] = value\n descriptions[value] = desc\n return (values, descriptions)", "def _parseConfiguration (self, lineList : StringList):\n\n Logging.trace(\">>\")\n\n cls = self.__class__\n cls._mergeContinuationLines(lineList)\n\n for i, currentLine in enumerate(lineList):\n lineNumber = i + 1\n\n # remove leading and trailing white space from line\n currentLine = currentLine.strip()\n Logging.trace(\"--: (%d) %s\", i+1, currentLine)\n\n if (currentLine == \"\"\n or currentLine.startswith(cls._commentMarker)):\n # this is an empty line or comment line => skip it\n pass\n else:\n match = cls._keyValueRegExp.search(currentLine)\n\n if not match:\n Logging.traceError(\"bad line %d without key-value-pair\",\n lineNumber)\n else:\n key = match.group(1)\n value = match.group(2)\n value = self._expandVariables(value)\n value = cls._combineFragmentedString(value)\n self._keyToStringValueMap[key] = value\n Logging.trace(\"--: string value %r -> %r\", key, value)\n value = cls._adaptConfigurationValue(value)\n self._keyToValueMap[key] = value\n Logging.trace(\"--: adapted value %r -> %r\", key, value)\n\n Logging.trace(\"<<: %r\", self._keyToValueMap)", "def css2dict(css):\n cssdict = {}\n if None == css:\n return cssdict\n for pair in css.split(';'): #TODO: what about escaped separators\n if pair.find(':') >= 0:\n key, value = pair.split(':')\n cssdict[ key.strip() ] = value.strip()\n return cssdict", "def _parse_headers(raw_headers: List[str]) -> Dict[str, str]:\n headers: Dict[str, str] = {}\n for header in raw_headers:\n name = header[: header.find(\":\")].strip()\n value = header[header.find(\":\") + 1 :].strip()\n headers[name.lower()] = value\n\n return headers", "def get_dict(lst, rcs=None, pos=None):\n 
digits = lst[0].union(*lst)\n no_digits = dict()\n for dig in digits:\n where = []\n for s in enumerate(lst):\n if dig in s[1]:\n if rcs == \"S\":\n where.append(pos[s[0]])\n elif rcs == \"R\":\n where.append((pos, s[0]))\n elif rcs == \"C\":\n where.append((s[0], pos))\n else:\n where.append(s[0])\n no_digits[dig] = where\n return no_digits", "def _parse_states(self, raw_states, logger):\n jobs = raw_states.splitlines()\n parsed = {}\n if jobs and (len(jobs) > 1 or jobs[0] != ''):\n for job in jobs:\n first, second = job.strip().split(',')\n parsed[first] = self._parse_exit_codes(second)\n\n return parsed", "def parse(\n data: str,\n raw: bool = False,\n quiet: bool = False\n) -> Dict:\n jc.utils.compatibility(__name__, info.compatible, quiet)\n jc.utils.input_type_check(data)\n\n raw_output: Dict = {}\n split_me = {'it_value:', 'it_interval:'}\n\n if jc.utils.has_data(data):\n\n for line in filter(None, data.splitlines()):\n\n # epoll files\n if line.startswith('tfd:'):\n line_match = re.findall(r'(?P<key>\\S+):(?:\\s+)?(?P<val>\\S+s*)', line)\n if line_match:\n raw_output.update({'epoll': {k.strip(): v.strip() for k, v in line_match}})\n continue\n\n # inotify files\n if line.startswith('inotify'):\n split_line = line[8:].split()\n raw_output['inotify'] = {}\n for item in split_line:\n k, v = item.split(':', maxsplit=1)\n raw_output['inotify'][k] = v\n continue\n\n # fanotify files\n if line.startswith('fanotify'):\n split_line = line[9:].split()\n\n if not 'fanotify' in raw_output:\n raw_output['fanotify'] = {}\n\n for item in split_line:\n k, v = item.split(':', maxsplit=1)\n raw_output['fanotify'][k] = v\n continue\n\n # timerfd files\n if line.split()[0] in split_me:\n split_line = line.replace(':', '').replace('(', '').replace(')', '').replace(',', '').split()\n raw_output[split_line[0]] = [int(x) for x in split_line[1:]]\n continue\n\n key, val = line.split(':', maxsplit=1)\n raw_output[key.strip()] = val.strip()\n continue\n\n return raw_output if raw else _process(raw_output)", "def _parse(source: str) -> Store:\n parsed_data = {}\n\n with open(source) as env_file:\n for line in env_file:\n line = line.strip() # noqa: WPS440\n\n if not line or line.startswith('#') or '=' not in line:\n # Ignore comments and lines without assignment.\n continue\n\n # Remove whitespaces and quotes:\n env_name, env_value = line.split('=', 1)\n env_name = env_name.strip()\n env_value = env_value.strip().strip('\\'\"')\n parsed_data[env_name] = env_value\n\n return parsed_data", "def _base64_to_der_keylist(base64_keylist: List[str], keyidv2_list: List[int]) -> List[Tuple[bytes, Optional[int]]]:\n res = []\n for idx, entry in enumerate(base64_keylist):\n keyidv2 = keyidv2_list[idx] if idx < len(keyidv2_list) else None\n res.append((base64.b64decode(entry), keyidv2))\n return res", "def getSurlTokenDictionary(lfns, tokens):\n\n dictionary = {}\n\n if len(lfns) == len(tokens):\n dictionary = dict(zip(lfns, tokens))\n else:\n tolog(\"!!WARNING!!2233!! 
Cannot create dictionary from lists of different lengths: %s, %s\" % (str(lfns), str(tokens)))\n\n return dictionary", "def test__format_asn_dict(self, parser):\n for key, value in RPKI_Validator_Wrapper.get_validity_dict().items():\n d = {'asn': 'AS198051', 'prefix': '1.2.0.0/16', 'validity': key}\n assert parser._format_asn_dict(d) == [198051, '1.2.0.0/16', value]", "def parse_bibtex(self, data: str) -> Dict:\n\n new_bib = [line for line in data.splitlines() if \"= ,\" not in line]\n new_bib = \"\\n\".join(new_bib)\n bib_db: bibtexparser.bibdatabase.BibDatabase = bibtexparser.loads(new_bib)\n result = dict()\n for entry in bib_db.entries:\n osti_id = entry[\"ID\"].split(\"_\")[1]\n result[osti_id] = entry\n return result", "def parse_output(output):\n lines = output.splitlines()[3:-1]\n r = {}\n for line in lines:\n kv = filter(None, line.split('|'))\n kv = [x.strip() for x in kv]\n r.update({kv[0]: kv[1]})\n return r", "def word_pairs_decode(secret_key):\r\n dictionary_decode = {}\r\n for line in open(secret_key):\r\n line = line.split()\r\n dictionary_decode[line[1]] = line[0]\r\n return dictionary_decode", "def str2dict(string):\n res_dict = {}\n for keyvalue in string.split(','):\n (key, value) = keyvalue.split('=', 1)\n res_dict[key] = value\n return res_dict", "def message_to_dict(message):\n message_dict = {}\n if isinstance(message, str):\n tmp = re.sub(\"[{}\\\"]\", '', message).split(',')\n for string in tmp:\n var = string.split(':')\n message_dict[var[0]] = var[1]\n return message_dict", "def parse_from_string(config_pair):\n key, value = config_pair.split(\"=\")\n value = literal_eval(value)\n current_config_keys = key.split('.')[::-1]\n last_config_value = {current_config_keys[0]: value}\n for current_config_subkey in current_config_keys[1:]:\n last_config_value = {current_config_subkey: last_config_value}\n return last_config_value", "def parse_params(params):\n pairs = params.split(' ')\n content = dict()\n for key, value in [pair.split('=') for pair in pairs]:\n content[key] = int(value)\n return content", "def _result_to_dict(line):\n f = line.split(':;')\n return {'server': f[0], 'os_name': f[1], 'status': f[2], 'ipv4': f[3]}", "def _parse_spec(self):\n\n key, value = self._lines.current.split(':', 1)\n key, value = key.strip(), value.strip()\n value = int(value) if key in self._INT_PROPERTIES else value\n\n try:\n next(self._lines)\n except StopIteration:\n pass\n\n return {key: value}", "def _parse_spec(self):\n\n key, value = self._lines.current.split(':', 1)\n key, value = key.strip(), value.strip()\n value = int(value) if key in self._INT_PROPERTIES else value\n\n try:\n next(self._lines)\n except StopIteration:\n pass\n\n return {key: value}", "def parse(cls, to_parse):\n if type(to_parse) not in (list, tuple):\n raise BCURStringFormatError(\n f\"{to_parse} is of type {type(to_parse)}, not a list/tuple\"\n )\n\n payloads = []\n global_checksum, global_y = \"\", 0\n for cnt, bcur_string in enumerate(to_parse):\n entry_payload, entry_checksum, entry_x, entry_y = _parse_bcur_helper(\n bcur_string=bcur_string\n )\n if cnt + 1 != entry_x:\n raise ValueError(\n f\"BCUR strings not in order: got {entry_x} and was expecting {cnt+1}\"\n )\n\n # Initialize checksum and y (as in x-of-y) on first loop\n if cnt == 0:\n global_checksum = entry_checksum\n global_y = entry_y\n\n elif entry_checksum != global_checksum:\n raise ValueError(\n f\"Entry {bcur_string} has checksum {entry_checksum} but we're expecting {global_checksum}\"\n )\n elif entry_y != global_y:\n raise 
ValueError(\n f\"Entry {bcur_string} wants {entry_y} parts but we're expecting {global_y} parts\"\n )\n # All checks pass\n payloads.append(entry_payload)\n\n # will throw an error if checksum is incorrect\n enc = bcur_decode(data=\"\".join(payloads), checksum=global_checksum)\n\n return cls(text_b64=b2a_base64(enc).strip().decode(), checksum=global_checksum)", "def parse_auth_response(text: str) -> dict[str, str]:\n response_data = {}\n for line in text.split(\"\\n\"):\n if not line:\n continue\n\n key, _, val = line.partition(\"=\")\n response_data[key] = val\n\n return response_data", "def _parse_tags(self):\n tokens = self.tags_str[1:].split(\";\")\n self._tags = {\n k.strip(): v\n for token in tokens\n for k, v in [token.split(\"=\")]\n }", "def dictFromLines(lines,sep=None):\n reComment = re.compile('#.*')\n temp = [reComment.sub('',x).strip() for x in lines.split('\\n')]\n if sep == None or type(sep) == type(''):\n temp = dict([x.split(sep,1) for x in temp if x])\n else: #--Assume re object.\n temp = dict([sep.split(x,1) for x in temp if x])\n return temp", "def parse_ref_file(file, start_pos=2, end_pos=7):\n data = []\n ref_dict = {}\n try:\n with open(file) as f:\n content = f.read()\n data = [word for lines in content.split(';')[start_pos: end_pos] for word in lines.splitlines(True) if '=' in word]\n for item in data:\n k = item.split('=')[0].strip().strip(\"'\")\n v = item.split('=')[1].strip().strip(\"'\") \n if k not in ref_dict:\n ref_dict[k] = v\n except Exception as e:\n logger.info('Failed to parse SAS dictionary...')\n return {}\n return ref_dict", "def convert_attr_to_dict(attr):\n\n\tresult = dict()\n\tattr = attr.split(';')\n\tattrlist = [a.split(':') for a in attr]\n\tfor pair in attrlist:\n\t\tif len(pair) == 2:\n\t\t\tkey = pair[0]\n\t\t\tvalue = pair[1]\n\t\t\tresult[key] = value\n\n\treturn result", "def str2dic(self, string):\n dic = {}\n list0=string.split(\"&\")\n for i in list0:\n list2 = i.split(\"=\")\n dic[list2[0]] = list2[1]\n return dic", "def parse_api_header(content):\n result = {}\n for line in content.split('\\n'):\n for kw in KEYWORDS:\n if kw in line:\n _, _, value = line.strip().split()\n value = value.strip()\n\n if kw == oqs.CRYPTO_ALGNAME:\n assert value[0] == value[-1] == '\"'\n value = value[1:-1]\n\n result[kw] = value\n break\n\n return result", "def stylecrunch(stystr):\n return dict(pair.split(\":\") for pair in semicolons.findall(stystr))", "def parse_input(self, input_string):\n ip_addresses = {}\n\n tree = ET.parse(StringIO(input_string))\n root = tree.getroot()\n\n for e1 in root:\n if e1.tag == \"host\":\n host = e1\n\n ports = None\n address = None\n\n for e2 in host:\n if e2.tag == \"ports\":\n ports = e2\n if e2.tag == \"address\" and e2.attrib['addrtype'] != \"mac\":\n address = e2\n\n if ports is None:\n continue\n\n ip_address = address.attrib['addr']\n if ip_address not in ip_addresses:\n ip_addresses[ip_address] = []\n\n for port in ports:\n\n if port.tag != \"port\":\n continue\n\n port_protocol = port.attrib[\"protocol\"]\n port_number = str(port.attrib['portid'])\n port_state = port.find(\"state\").attrib['state']\n\n # lets only pay attention to open ports\n if port_state in [\"open\"]:\n s = dumps([port_protocol, port_number, port_state])\n\n ip_addresses[ip_address].append(s)\n\n service = port.find(\"service\")\n\n if \"name\" in service.attrib:\n s = dumps([port_protocol, port_number, port_state, service.attrib[\"name\"]])\n ip_addresses[ip_address].append(s)\n\n #s = dumps([port_protocol, port_state, 
service.attrib[\"name\"]])\n #ip_addresses[ip_address].append(s)\n\n s_list = [port_protocol, port_number, port_state, service.attrib[\"name\"]]\n #s_no_port_list = [port_protocol, port_state, service.attrib[\"name\"]]\n\n for sid in [\"product\", \"version\", \"extrainfo\", \"servicefp\"]:\n if sid in service.attrib:\n s_list.append(service.attrib[sid])\n s = dumps(s_list)\n ip_addresses[ip_address].append(s)\n\n #s_no_port_list.append(service.attrib[sid])\n #s = dumps(s_no_port_list)\n #ip_addresses[ip_address].append(s)\n\n for script_element in port:\n if script_element.tag != \"script\":\n continue\n # todo parse script tag from xml\n script_id = script_element.attrib[\"id\"]\n\n for table in script_element:\n if table.tag == \"table\":\n for elem in table:\n key = \"\"\n if \"key\" in elem.attrib:\n key = elem.attrib[\"key\"]\n\n if elem.text is not None:\n s = dumps([port_protocol, port_number, port_state, service.attrib[\"name\"],\n script_id, key, elem.text])\n else:\n s = dumps(\n [port_protocol, port_number, port_state, service.attrib[\"name\"],\n script_id, key])\n ip_addresses[ip_address].append(s)\n\n if table.tag == \"elem\":\n elem = table\n key = \"\"\n if \"key\" in elem.attrib:\n key = elem.attrib[\"key\"]\n\n if elem.text is not None:\n s = dumps([port_protocol, port_number, port_state, service.attrib[\"name\"],\n script_id, key, elem.text])\n else:\n s = dumps(\n [port_protocol, port_number, port_state, service.attrib[\"name\"],\n script_id, key])\n ip_addresses[ip_address].append(s)\n\n print \"no of IP's taken from NMAP: \" + str(len(ip_addresses.viewkeys()))\n return ip_addresses", "def parse_config_string(config_string, issue_warnings=True):\n config_dict = {}\n my_splitter = shlex.shlex(config_string, posix=True)\n my_splitter.whitespace = ','\n my_splitter.whitespace_split = True\n for kv_pair in my_splitter:\n kv_pair = kv_pair.strip()\n if not kv_pair:\n continue\n kv_tuple = kv_pair.split('=', 1)\n if len(kv_tuple) == 1:\n if issue_warnings:\n MsafConfigWarning.warn(\n (\"Config key '%s' has no value, ignoring it\" %\n kv_tuple[0]), stacklevel=1)\n else:\n k, v = kv_tuple\n # subsequent values for k will override earlier ones\n config_dict[k] = v\n return config_dict", "def parsePrivateKey(s):\r\n return parsePEMKey(s, private=True)", "def parse_launch_arguments(launch_arguments: List[Text]) -> List[Tuple[Text, Text]]:\n parsed_launch_arguments = OrderedDict() # type: ignore\n for argument in launch_arguments:\n count = argument.count(':=')\n if count == 0 or argument.startswith(':=') or (count == 1 and argument.endswith(':=')):\n raise RuntimeError(\n \"malformed launch argument '{}', expected format '<name>:=<value>'\"\n .format(argument))\n name, value = argument.split(':=', maxsplit=1)\n parsed_launch_arguments[name] = value # last one wins is intentional\n return parsed_launch_arguments.items()", "def openssl_cnf(filename, sans):\n\n str_sans = \"\"\n if type(sans) is str:\n str_sans = sans\n else:\n for san in sans:\n if len(str_sans) > 0:\n str_sans = str_sans + \"\\n\"\n str_sans = str_sans + san\n\n cnf = \"\"\"\\\n#\n# OpenSSL example configuration file.\n# This is mostly being used for generation of certificate requests.\n#\n\n# Note that you can include other files from the main configuration\n# file using the .include directive.\n#.include filename\n\n# This definition stops the following lines choking if HOME isn't\n# defined.\nHOME\t\t\t= .\n\n# Extra OBJECT IDENTIFIER info:\n#oid_file\t\t= $ENV::HOME/.oid\noid_section\t\t= new_oids\n\n# To use 
this configuration file with the \"-extfile\" option of the\n# \"openssl x509\" utility, name here the section containing the\n# X.509v3 extensions to use:\n# extensions\t\t=\n# (Alternatively, use a configuration file that has only\n# X.509v3 extensions in its main [= default] section.)\n\n[ new_oids ]\n\n# We can add new OIDs in here for use by 'ca', 'req' and 'ts'.\n# Add a simple OID like this:\n# testoid1=1.2.3.4\n# Or use config file substitution like this:\n# testoid2=${testoid1}.5.6\n\n# Policies used by the TSA examples.\ntsa_policy1 = 1.2.3.4.1\ntsa_policy2 = 1.2.3.4.5.6\ntsa_policy3 = 1.2.3.4.5.7\n\n####################################################################\n[ ca ]\ndefault_ca\t= CA_default\t\t# The default ca section\n\n####################################################################\n[ CA_default ]\n\ndir\t\t= ./test-pki\t\t# Where everything is kept\ncerts\t\t= $dir/certs\t\t# Where the issued certs are kept\ncrl_dir\t\t= $dir/crl\t\t# Where the issued crl are kept\ndatabase\t= $dir/index.txt\t# database index file.\nunique_subject\t= no\t\t\t# Set to 'no' to allow creation of\n # several certs with same subject.\nnew_certs_dir\t= $dir/newcerts\t\t# default place for new certs.\n\ncertificate\t= $dir/cacert.pem \t# The CA certificate\nserial\t\t= $dir/serial \t\t# The current serial number\ncrlnumber\t= $dir/crlnumber\t# the current crl number\n # must be commented out to leave a V1 CRL\ncrl\t\t= $dir/crl.pem \t\t# The current CRL\nprivate_key\t= $dir/private/cakey.pem# The private key\n\nx509_extensions\t= usr_cert\t\t# The extensions to add to the cert\n\n# Comment out the following two lines for the \"traditional\"\n# (and highly broken) format.\nname_opt \t= ca_default\t\t# Subject Name options\ncert_opt \t= ca_default\t\t# Certificate field options\n\n# Extension copying option: use with caution.\ncopy_extensions = none\n\n# Extensions to add to a CRL. Note: Netscape communicator chokes on V2 CRLs\n# so this is commented out by default to leave a V1 CRL.\n# crlnumber must also be commented out to leave a V1 CRL.\n# crl_extensions\t= crl_ext\n\ndefault_days\t= 365\t\t\t# how long to certify for\ndefault_crl_days= 30\t\t\t# how long before next CRL\ndefault_md\t= default\t\t# use public key default MD\npreserve\t= no\t\t\t# keep passed DN ordering\n\n# A few difference way of specifying how similar the request should look\n# For type CA, the listed attributes must be the same, and the optional\n# and supplied fields are just that :-)\npolicy\t\t= policy_anything\n\n# For the CA policy\n[ policy_match ]\ncountryName\t\t= match\nstateOrProvinceName\t= match\norganizationName\t= match\norganizationalUnitName\t= optional\ncommonName\t\t= supplied\nemailAddress\t\t= optional\n\n# For the 'anything' policy\n# At this point in time, you must list all acceptable 'object'\n# types.\n[ policy_anything ]\ncountryName\t\t= optional\nstateOrProvinceName\t= optional\nlocalityName\t\t= optional\norganizationName\t= optional\norganizationalUnitName\t= optional\ncommonName\t\t= supplied\nemailAddress\t\t= optional\n\n####################################################################\n[ req ]\ndefault_bits\t\t= 2048\ndefault_keyfile \t= privkey.pem\ndistinguished_name\t= req_distinguished_name\nattributes\t\t= req_attributes\nx509_extensions\t= v3_ca\t# The extensions to add to the self signed cert\n\n# Passwords for private keys if not present they will be prompted for\n# input_password = secret\n# output_password = secret\n\n# This sets a mask for permitted string types. 
There are several options.\n# default: PrintableString, T61String, BMPString.\n# pkix\t : PrintableString, BMPString (PKIX recommendation before 2004)\n# utf8only: only UTF8Strings (PKIX recommendation after 2004).\n# nombstr : PrintableString, T61String (no BMPStrings or UTF8Strings).\n# MASK:XXXX a literal mask value.\n# WARNING: ancient versions of Netscape crash on BMPStrings or UTF8Strings.\nstring_mask = utf8only\n\n# req_extensions = v3_req # The extensions to add to a certificate request\n\nsubjectAltName = @alt_names\n\n[ req_distinguished_name ]\ncountryName\t\t\t= Country Name (2 letter code)\ncountryName_default\t\t= AU\ncountryName_min\t\t\t= 2\ncountryName_max\t\t\t= 2\n\nstateOrProvinceName\t\t= State or Province Name (full name)\nstateOrProvinceName_default\t= Some-State\n\nlocalityName\t\t\t= Locality Name (eg, city)\n\n0.organizationName\t\t= Organization Name (eg, company)\n0.organizationName_default\t= Internet Widgits Pty Ltd\n\n# we can do this but it is not needed normally :-)\n#1.organizationName\t\t= Second Organization Name (eg, company)\n#1.organizationName_default\t= World Wide Web Pty Ltd\n\norganizationalUnitName\t\t= Organizational Unit Name (eg, section)\n#organizationalUnitName_default\t=\n\ncommonName\t\t\t= Common Name (e.g. server FQDN or YOUR name)\ncommonName_max\t\t\t= 64\n\nemailAddress\t\t\t= Email Address\nemailAddress_max\t\t= 64\n\n# SET-ex3\t\t\t= SET extension number 3\n\n[ req_attributes ]\nchallengePassword\t\t= A challenge password\nchallengePassword_min\t\t= 4\nchallengePassword_max\t\t= 20\n\nunstructuredName\t\t= An optional company name\n\n[ usr_cert ]\n\n# These extensions are added when 'ca' signs a request.\n\n# This goes against PKIX guidelines but some CAs do it and some software\n# requires this to avoid interpreting an end user certificate as a CA.\n\nbasicConstraints=CA:FALSE\n\n# Here are some examples of the usage of nsCertType. 
If it is omitted\n# the certificate can be used for anything *except* object signing.\n\n# This is OK for an SSL server.\n# nsCertType\t\t\t= server\n\n# For an object signing certificate this would be used.\n# nsCertType = objsign\n\n# For normal client use this is typical\n# nsCertType = client, email\n\n# and for everything including object signing:\n# nsCertType = client, email, objsign\n\n# This is typical in keyUsage for a client certificate.\n# keyUsage = nonRepudiation, digitalSignature, keyEncipherment\n\n# This will be displayed in Netscape's comment listbox.\nnsComment\t\t\t= \"OpenSSL/Python Generated Certificate\"\n\n# PKIX recommendations harmless if included in all certificates.\nsubjectKeyIdentifier=hash\nauthorityKeyIdentifier=keyid,issuer\n\n# This stuff is for subjectAltName and issuerAltname.\n# Import the email address.\n# subjectAltName=email:copy\n# An alternative to produce certificates that aren't\n# deprecated according to PKIX.\n# subjectAltName=email:move\n\nsubjectAltName = @alt_names\n\n# Copy subject details\n# issuerAltName=issuer:copy\n\n#nsCaRevocationUrl\t\t= http://www.domain.dom/ca-crl.pem\n#nsBaseUrl\n#nsRevocationUrl\n#nsRenewalUrl\n#nsCaPolicyUrl\n#nsSslServerName\n\n# This is required for TSA certificates.\n# extendedKeyUsage = critical,timeStamping\n#extendedKeyUsage = serverAuth, clientAuth, emailProtection\nextendedKeyUsage = serverAuth, emailProtection\n\n[ v3_req ]\n\n# Extensions to add to a certificate request\n\nbasicConstraints = CA:FALSE\nkeyUsage = nonRepudiation, digitalSignature, keyEncipherment\nsubjectAltName = @alt_names\n\n[alt_names]\n\n%(str_sans)s\n\n[ v3_ca ]\n\n# Extensions for a typical CA\n\n# PKIX recommendation.\n\nsubjectKeyIdentifier=hash\n\nauthorityKeyIdentifier=keyid:always,issuer\n\nbasicConstraints = critical,CA:true\n\n# Key usage: this is typical for a CA certificate. However since it will\n# prevent it being used as an test self-signed certificate it is best\n# left out by default.\n# keyUsage = cRLSign, keyCertSign\n\n# Some might want this also\n# nsCertType = sslCA, emailCA\n\n# Include email address in subject alt name: another PKIX recommendation\n# subjectAltName=email:copy\n# Copy issuer details\n# issuerAltName=issuer:copy\n\n# DER hex encoding of an extension: beware experts only!\n# obj=DER:02:03\n# Where 'obj' is a standard or added object\n# You can even override a supported extension:\n# basicConstraints= critical, DER:30:03:01:01:FF\n\n[ crl_ext ]\n\n# CRL extensions.\n# Only issuerAltName and authorityKeyIdentifier make any sense in a CRL.\n\n# issuerAltName=issuer:copy\nauthorityKeyIdentifier=keyid:always\n\n[ proxy_cert_ext ]\n# These extensions should be added when creating a proxy certificate\n\n# This goes against PKIX guidelines but some CAs do it and some software\n# requires this to avoid interpreting an end user certificate as a CA.\n\nbasicConstraints=CA:FALSE\n\n# Here are some examples of the usage of nsCertType. 
If it is omitted\n# the certificate can be used for anything *except* object signing.\n\n# This is OK for an SSL server.\n# nsCertType\t\t\t= server\n\n# For an object signing certificate this would be used.\n# nsCertType = objsign\n\n# For normal client use this is typical\n# nsCertType = client, email\n\n# and for everything including object signing:\n# nsCertType = client, email, objsign\n\n# This is typical in keyUsage for a client certificate.\n# keyUsage = nonRepudiation, digitalSignature, keyEncipherment\n\n# This will be displayed in Netscape's comment listbox.\nnsComment\t\t\t= \"OpenSSL/Python Generated Proxy Certificate\"\n\n# PKIX recommendations harmless if included in all certificates.\nsubjectKeyIdentifier=hash\nauthorityKeyIdentifier=keyid,issuer\n\n# This stuff is for subjectAltName and issuerAltname.\n# Import the email address.\n# subjectAltName=email:copy\n# An alternative to produce certificates that aren't\n# deprecated according to PKIX.\n# subjectAltName=email:move\n\n# Copy subject details\n# issuerAltName=issuer:copy\n\n#nsCaRevocationUrl\t\t= http://www.domain.dom/ca-crl.pem\n#nsBaseUrl\n#nsRevocationUrl\n#nsRenewalUrl\n#nsCaPolicyUrl\n#nsSslServerName\n\n# This really needs to be in place for it to be a proxy certificate.\nproxyCertInfo=critical,language:id-ppl-anyLanguage,pathlen:3,policy:foo\n\n####################################################################\n[ tsa ]\n\ndefault_tsa = tsa_config1\t# the default TSA section\n\n[ tsa_config1 ]\n\n# These are used by the TSA reply generation only.\ndir\t\t= ./demoCA\t\t# TSA root directory\nserial\t\t= $dir/tsaserial\t# The current serial number (mandatory)\ncrypto_device\t= builtin\t\t# OpenSSL engine to use for signing\nsigner_cert\t= $dir/tsacert.pem \t# The TSA signing certificate\n # (optional)\ncerts\t\t= $dir/cacert.pem\t# Certificate chain to include in reply\n # (optional)\nsigner_key\t= $dir/private/tsakey.pem # The TSA private key (optional)\nsigner_digest = sha256\t\t\t# Signing digest to use. (Optional)\ndefault_policy\t= tsa_policy1\t\t# Policy if request did not specify it\n # (optional)\nother_policies\t= tsa_policy2, tsa_policy3\t# acceptable policies (optional)\ndigests = sha1, sha256, sha384, sha512 # Acceptable message digests (mandatory)\naccuracy\t= secs:1, millisecs:500, microsecs:100\t# (optional)\nclock_precision_digits = 0\t# number of digits after dot. (optional)\nordering\t\t= yes\t# Is ordering defined for timestamps?\n # (optional, default: no)\ntsa_name\t\t= yes\t# Must the TSA name be included in the reply?\n # (optional, default: no)\ness_cert_id_chain\t= no\t# Must the ESS cert id chain be included?\n # (optional, default: no)\ness_cert_id_alg\t\t= sha1\t# algorithm to compute certificate\n # identifier (optional, default: sha1)\n \"\"\" % locals()\n with open(filename, \"w\") as f:\n f.write(cnf)\n f.close()", "def list_to_dict(l):\n d={}\n for index, item in enumerate(l[::2]):\n key=item\n value=l[index*2+1]\n if isinstance(value, list) and value!=[]:\n d[key]=list_to_dict(value)\n else:\n d.setdefault(key, []).append(value)\n return d", "def parse_parameters_file(lines):\n param_dict = {}\n for line in lines:\n line = line.strip()\n if line:\n (param, values) = line.split('\\t')\n param_dict[param] = values.split(',')\n return param_dict", "def parse_crn_string(data):\n crn_document = crn_document_setup()\n return _post_process(crn_document.parseString(data).asList())" ]
[ "0.59891826", "0.58771014", "0.5808679", "0.58030844", "0.5633769", "0.562944", "0.5532584", "0.54721385", "0.5428147", "0.5418335", "0.54139864", "0.53684205", "0.5357799", "0.5338177", "0.5321925", "0.5310008", "0.53023285", "0.52133584", "0.520872", "0.5188802", "0.51713", "0.5143988", "0.5112959", "0.5106991", "0.5106564", "0.5101687", "0.5092882", "0.5078429", "0.5069801", "0.5063048", "0.50546813", "0.503935", "0.5021485", "0.501231", "0.50112087", "0.4999986", "0.4999924", "0.49890962", "0.49816594", "0.49731937", "0.49717355", "0.4957932", "0.49529177", "0.49478412", "0.49382165", "0.4935348", "0.492859", "0.49247488", "0.49037093", "0.4877861", "0.487637", "0.4869072", "0.48573968", "0.48540846", "0.4853141", "0.4843257", "0.4840399", "0.48384067", "0.48383105", "0.48362863", "0.4833186", "0.48323292", "0.48317528", "0.48216292", "0.4821411", "0.4819266", "0.48072794", "0.48022366", "0.479665", "0.4793606", "0.47919917", "0.4787678", "0.47703016", "0.47699365", "0.47647238", "0.47609854", "0.47597393", "0.47591245", "0.47548643", "0.47539905", "0.4753014", "0.47486028", "0.47486028", "0.4733887", "0.47315928", "0.47298324", "0.4728968", "0.47222683", "0.47200716", "0.47181946", "0.47178203", "0.47169384", "0.47147715", "0.47094384", "0.46997356", "0.46990767", "0.4695247", "0.46899775", "0.4689612", "0.46885788" ]
0.4789175
71
Print message and exit.
def die(txt, *args):
    if args:
        txt = txt % args
    sys.stderr.write(txt + '\n')
    sys.exit(1)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def exit_message(message):\n\tprint(yellow(message))\n\tsys.exit(1)", "def exitWithMsg(msg):\n\tprint(msg + \" -- quitting\")\n\tsys.exit(0)", "def print_ok(msg):\n print('OK - %s' % (msg))\n sys.exit(0)", "def exit(cls, msg):\n Console.error(msg)\n sys.exit()", "def print_msg_exit(msg=\"\", exit_code=0):\n if msg:\n print(msg)\n sys.exit(exit_code)", "def _exit(message):\n\tprint('ERROR: ' + message, file=sys.stderr)\n\tsys.exit(1)", "def exit_program(msg):\n print(msg)\n sys.exit(1)", "def error(message):\n print message\n sys.exit(2)", "def quit():\n print(\"Thank you. Have a nice day:\")\n sys.exit()", "def exit(self) -> None:\n logger.info(messages.general(\"Thank You for using FooTools.\\n\"))\n exit()", "def do_quit(self, arg):\n cprint(('Thankyou for Using this todo Application!'), 'yellow')\n exit()", "def _exit() -> None:\n\n print(\n \"Thanks for using TbSET. \"\n \"See you next time!\\n\"\n )", "def exit_with_msg(msg):\n print(\"{}\\n\\nUsage: {} <arg1> <arg2>\".format(msg, sys.argv[0]))\n exit(0)", "def exit_with_msg(msg):\n print(\"{}\\n\\nUsage: {} <arg1> <arg2>\".format(msg, sys.argv[0]))\n exit(0)", "def print_critical(message):\n print(message)\n raise SystemExit(-1)", "def ErrorExit(msg):\r\n print >>sys.stderr, msg\r\n sys.exit(1)", "def exit_program():\n print(\"Good Bye! Happy Searching...\")", "def do_quit(self, arg):\n\n print('Good Bye!')\n exit()", "def do_EOF(self, _: str) -> None:\n print()\n exit(0)", "def terminate(exitmsg: str):\n print(exitmsg)\n sys.exit(1)", "def ErrorExit(msg):\n print >>sys.stderr, msg\n sys.exit(1)", "def exit_success(message: Optional[str] = None) -> NoReturn:\n\n\tif (message != None):\n\t\tprint(message)\n\tsys.exit(EXIT_SUCCESS)", "def _ErrorExit(message):\n print >>sys.stderr, message\n sys.exit(1)", "def die(msg):\n errorPrint(msg)\n sys.exit(1)", "def report(exit_code, message):\n prefix = STATUSES[exit_code].upper()\n print('%s: %s' % (prefix, message))\n exit(exit_code)", "def print_error_and_exit(error_message):\n\n print(\"Error: \" + error_message)\n sys.exit()", "def exit_program():\n\n print(\"Thank you. Bye\")\n return \"exit\"", "def do_quit(self, args):\n print('Good Bye!')\n exit()", "def finish(self, message):\n self.stdout = message\n self.returncode = 0", "def quit_program():\n print(\"Thank you for your time. 
Goodbye.\")\n exit()", "def print_message(message):\n print(\"-------------------------\")\n print(message)", "def main():\n print(\"Everything is ok.\")", "def exit_error(message: Optional[str] = None) -> NoReturn:\n\n\tif (message != None):\n\t\tprint(message)\n\tsys.exit(EXIT_FAILURE)", "def print_message(message):\r\n return print(message)", "def exit_with_error_message (msg):\n print (\"[ERROR] %s\\n\" % msg)\n raise SystemExit", "def exit(self):\n print(\"\\n***************************** Exit Metafor *****************************\")", "def exit(self):\n print(\"\"\"\\n\n ________________________________________\n\n Thank you for using\n your HABITSBOX today\n ________________________________________\n \"\"\")\n self.analytics.close()\n sys.exit(0)", "def exit_program():\n print(\"Good bye\")\n sys.exit()", "def exit_prompt(message=''):\r\n if message != '': print(str(message))\r\n input('\\nPress [Enter] to exit...')\r\n sys.exit()", "def close(self):\n if self.print_msg:\n print(' ')", "def present_exit_massage(self):\n print(\"Thank you for using the calculator....\")", "def msg(_type, text, exit=0):\n sys.stderr.write(\"%s: %s\\n\" % (_type, text))\n sys.exit(exit)", "def finalize_error():\n print('')\n exit(-1)", "def exit(self):\n if self.debug:\n print(\"%s exit\" % self.name)\n self.stop()", "def exit():\n sys.exit(1)", "def exit(self):\n logger.debug(\"EXIT\")", "def quit_with_error(msg):\n import traceback\n stack = traceback.extract_stack()\n frame = stack[-3]\n print(msg)\n if (frame[3] is None):\n suffix = ''\n else:\n suffix = \": \"+frame[3]\n print('Line',repr(frame[1]),'of',frame[0] + suffix)\n print('Quitting with Error')\n raise SystemExit()", "def errorExit(msg):\n msgString = (\"Error: \" + msg + \"\\n\")\n sys.stderr.write(msgString)\n sys.exit()", "def do_quit(self, arg):\n exit()", "def exit(message, code=1):\r\n global _quiet\r\n if not _quiet and message and len(message) > 0:\r\n sys.stderr.write(\"%s (%s)\\n\" % (message, code))\r\n sys.exit(code)", "def __abort_script(message):\n print(message)\n sys.exit()", "def print_unknown(msg):\n print('UNKNOWN - %s' % (msg))\n sys.exit(3)", "def exit(self, status):\n self._printer(\"exit {0}\".format(status))", "def exit(self, status):\n self._printer(\"exit {0}\".format(status))", "def quit():\n raise EmbeddedConsoleExit", "def do_exit(self, arg):\n self.db.close_db()\n print(\" \\\\o_ Bye-bye...\")\n print(\" / \")\n print(\"<\\\\\")\n sys.exit()", "def write_error_message(self, message: str):\n\n return sys.exit(message)", "def error(self, message):\n ErrorExit('error: {}\\n'.format(message), 2)", "def _quit(self) -> None:\n self._show_bag(True)\n print(\"Thanks for playing!\")\n exit()", "def screen_exit(self, error=0):\n sys.exit(error)", "def Die(msg):\n print(msg, file=sys.stderr)\n sys.exit(1)", "def _exit(msg):\n __exit(msg)", "def display_message():", "def do_exit(self, line): \n sys.exit(0)", "def error_msg(msg: str) -> None:\n print(\"ERROR: \", msg)\n exit(2)", "def error(msg):\n print(msg, file=sys.stderr)\n sys.exit()", "def do_exit(self, args):\n sys.exit(1)", "def PrintMessage(self, message):\n self.Stop(message)\n self.Start(self.what)", "def showMessage(self, message):\r\n print message", "def die(print_string):\n print(print_string)\n sys.exit(1)", "def _display_message(message: str) -> None:\n print(message)", "def do_exit(self,*args):\r\n return True", "def ConsoleExit(self, errorcode=200):\n pass", "def error(msg):\n print 'ERROR: %s' % msg\n sys.exit(1)", "def exit(self):\n pass", "def bail( 
msg ):\n # Terminate, with helpful error message:\n print(\"ERROR: \" + msg + \"... exiting.\", file=sys.stderr)\n exit(1)", "def print_message(self, message):\n print(message)", "def print_message(self, message):\n print(message)", "def output(self, msg):", "def stop_and_outputlogMessage(message):\n assert False\n outputlogMessage(message)", "def abort(message):\n\n sys.stderr.write(message + '\\n')\n sys.exit(1)", "def exit_with_error(error_msg):\n print error_msg\n sys.exit(1)", "def do_exit(self, s):\n return True", "def display_message():\n\tprint(\"In this chapter we will be learning how to write functions\")", "def error(self, message):\n sys.stderr.write('error: %s\\n' % message)\n self.print_help()\n sys.exit(2)", "def exit(self, usermessage=None):\n builtin.print = _builtin_print\n warnings.showwarning = _builtin_warning\n return self", "def exit(\n self,\n status_code: int = 0,\n message: Optional[str] = None,\n usage: Optional[str] = None,\n ) -> NoReturn:\n print(\"\\n\\n\".join(m for m in (usage, message) if m)) # noqa: T201\n sys.exit(status_code)", "def success(message):\n if DEBUG:\n with print_lock:\n print((Colours.OK_GREEN + 'SUCCESS: ' + Colours.END_COLOUR + message).strip())", "def abort(msg=''):\n if msg:\n print >> sys.stderr, msg\n sys.exit(1)", "def exit_out():\r\n print(\"Exiting...\")\r\n time.sleep(3)\r\n os.system(\"pause\")", "def main():\n codedmessage = ReadCodedMessage()\n PlayCodedMessage(codedmessage)\n PlayAgain(codedmessage)\n message = DecodeCodedMessage(codedmessage)\n if (message==\"?\"):\n if DEBUG:print(\"Unknown code - try again!\")\n else:\n if DEBUG:print (\"Message: \", message)", "def command_quit(self, arg):\n self.write('221 Bye', self.finish)", "def sys_exit(msg, error_level=1):\n sys.stderr.write(\"%s\\n\" % msg.rstrip())\n sys.exit(error_level)", "def postMessage(self, message):\n if self.BotOutputRequested:\n pass\n else:\n SiteDetailOutput.PrintStandardOutput(message, verbose=self._verbose)", "def _fatal(self, message: str) -> NoReturn:\n self._trace(message, Level.ERROR)\n sys.exit(1)", "def goodbye(self, args):\n\t\tself.write_line(\"GOODBYE\")\n\t\tself.close();", "def do_EOF(self, args):\n print(\"\")\n return True", "def print_std_error(self):\n print(self.std_error)\n sys.exit()", "def main():\n print(\"Everythin is ok\")", "def _write_err_msg_and_quit(self, msg):\n sys.stderr.write(msg)\n sys.exit(1)", "def die(msg: str) -> None:\n logger.error(msg)\n print(msg)\n exit(msg)" ]
[ "0.77262276", "0.76083595", "0.74169284", "0.73643696", "0.7243128", "0.719227", "0.7183562", "0.71205103", "0.7051382", "0.70065045", "0.69638604", "0.6944771", "0.69369346", "0.69369346", "0.6917173", "0.6913734", "0.6887772", "0.68613786", "0.679101", "0.6778247", "0.6768838", "0.67677623", "0.6764495", "0.6745617", "0.6739277", "0.67229635", "0.66944796", "0.66865855", "0.66756135", "0.66490865", "0.66414315", "0.66347104", "0.6631524", "0.66265047", "0.6619127", "0.6615769", "0.660823", "0.65960395", "0.65839076", "0.6579458", "0.65706575", "0.6551606", "0.65263087", "0.651993", "0.6516461", "0.65106356", "0.64979744", "0.6495569", "0.6483242", "0.64715093", "0.6463499", "0.6457441", "0.645703", "0.645703", "0.6454223", "0.6450314", "0.64434206", "0.64432687", "0.6440602", "0.6437823", "0.64257574", "0.64108497", "0.6404479", "0.6393092", "0.6388471", "0.6379479", "0.6357751", "0.6356217", "0.6346143", "0.6321642", "0.631636", "0.63067365", "0.6304085", "0.6301985", "0.6299349", "0.6296668", "0.62911516", "0.62911516", "0.6285214", "0.6279906", "0.6259757", "0.6259491", "0.6253644", "0.62486446", "0.6241117", "0.62390524", "0.62373304", "0.6231473", "0.62240136", "0.62226343", "0.6215069", "0.6199766", "0.61890346", "0.618618", "0.6181456", "0.6180149", "0.61779445", "0.6167857", "0.61660993", "0.6165982", "0.6165322" ]
0.0
-1
Print message to stderr.
def msg(txt, *args):
    if QUIET:
        return
    if args:
        txt = txt % args
    sys.stderr.write(txt + '\n')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def printerr(message):\n sys.stderr.write('{}\\n'.format(message))\n sys.stderr.flush()", "def print_stderr(message):\n sys.stderr.write(\"%s\\n\" % message)\n sys.stderr.flush()", "def to_stderr(message):\n print >> sys.stderr, message", "def error(message):\n print(message, file=sys.stderr)", "def printerr(msg):\n print(msg, file=sys.stderr)", "def _print_error(message):\n sys.stderr.write(str(message) + \"\\n\")\n sys.stderr.flush()", "def print_err(msg):\n print(msg, file=sys.stderr)", "def print_to_stderr(msg):\n sys.stderr.write(msg)", "def print_stderr(message):\r\n if LogOptions.stderr_log_level() != LogOptions.LOG_LEVEL_NONE:\r\n print(message, file=sys.stderr)", "def eprint(errmsg):\n print(errmsg, file=STDERR)", "def perror(message):\n print(message, file=sys.stderr)", "def err(*message, **kwargs):\n print(*message, file=sys.stderr, **kwargs)", "def print_error(message):\n from sys import stderr\n print(\"\\033[1;31;40m \" + message + \"\\033[0;37;40m\", file=stderr)", "def err(message: str) -> None:\n filename, line = filename_line()\n\n with State.lock:\n State.stderr.write(err_as_text(filename=filename, line=line, message=message))\n State.stderr.flush()", "def errprint(msg):\n\n print('!! *** ERROR: %s' % msg)", "def _print_error(msg):\n sys.stderr.write(msg + '\\n')\n LOG.error(msg)", "def print_error(msg):\n print(\"[{}] {}\".format(datetime.now(), msg), file=sys.stderr)", "def msg_err(message):\n to_stdout(\" !!! {message}\".format(message=message), colorf=red, bold=True)\n if _logger:\n _logger.error(message)", "def print_std_err(str_):\n print(str_, file=sys.stderr)", "def print_error_message(message):\r\n return print('ERROR:',message)", "def write(self, msg):\n sys.stderr.write(msg)", "def error(msg):\n print(msg, file=sys.stderr)\n sys.exit()", "def printerr(*args, **kwargs):\n console_print(sys.stderr, *args, **kwargs)", "def log_error(message):\n sys.stderr.write(message)\n sys.stderr.flush()", "def writeMessage(message):\n stderr.write(message + '\\n')\n stderr.flush()\n return", "def err(msg):\n print(colored.red(\"[ERROR]: {0}\".format(msg)))", "def write(self, msg):\n\n sys.stderr.write(msg)", "def print_err(*args, **kwargs):\n print(*args, file=stderr, **kwargs)", "def error(*args, **kwargs):\n print(*args, file=sys.stderr, **kwargs)", "def error(msg):\n sys.stdout.write('%s[ ERROR ]%s %s\\n' % (colors.RED, colors.RESET, msg))", "def print_failure_message(message):\n try:\n import colorama\n print(colorama.Fore.RED + message + colorama.Fore.RESET,\n file=sys.stderr)\n except ImportError:\n print(message, file=sys.stderr)", "def shell_msg(msg):\n print(msg, file=sys.stderr)", "def error(message):\n global LAST_LOG\n LAST_LOG = message\n cprint('\\r[ERR] {0}'.format(message), 'red', file=sys.stderr)", "def error(message):\n if DEBUG:\n with print_lock:\n print((Colours.FAIL + 'ERROR: ' + Colours.END_COLOUR + message).strip())", "def log_err(msg):\n msg = 'ERROR: {0}\\n'.format(msg)\n sys.stderr.write(msg)", "def err_print(*args, **kwargs):\n print(*args, file=sys.stderr, **kwargs)", "def print_err(err):\n return stdout.write(err.args[0])", "def logerror(msg):\n sys.stderr.write(str(msg) + '\\n')\n sys.stderr.flush()", "def errprint(*args):\n sys.stderr.write(' '.join(map(str,args)) + '\\n')", "def error(message):\n print str(message)", "def print_err(self, *lst):\n self.print2file(self.stderr, False, True, *lst)", "def to_stderr(self, message):\n message = self.ydl._bidi_workaround(message)\n output = message + '\\n'\n self.ydl._write_string(output, 
self.ydl._err_file)", "def print_error(message: str):\n print_with_color(message, constant.Color.FAIL)", "def error(self, msg, stderr=True):\n self.log(msg, level=self.ERROR, stderr=stderr)", "def print_failure_msg(msg):\n click.secho(msg, fg='red', file=sys.stderr)", "def _display_error(message: str) -> None:\n print()\n print(message, end='\\n\\n')", "def StdErr(self, message, die=False):\n error_message = self._FormatMessage(message)\n if die:\n exit_message = error_message.rstrip('\\n')\n sys.exit(exit_message)\n sys.stderr.write(error_message)\n sys.stderr.flush()", "def error(cls, message):\n print('[ERROR] {0}'.format(message))", "def stderr_print(*args, **kwargs):\n\n sys.stdout.flush()\n print(*args, **kwargs, file=sys.stderr)\n sys.stderr.flush()\n\n # else caller has to \"{}\\n\".format(...) and flush", "def err(*s):\n sys.stderr.write(TERM.bold_red)\n sys.stderr.write('Error: ')\n for part in s:\n sys.stderr.write(part)\n sys.stderr.write(TERM.normal)\n sys.stderr.write('\\n')", "def err(msg, die=None):\r\n sys.stderr.write(msg + '\\n')\r\n if die:\r\n sys.exit((die if type(die) is int else 1))", "def printError(s):\r\n sys.stderr.write(\"ERROR: %s\\n\" % s)\r\n sys.exit(-1)", "def color_print(message, color, newline='\\n'):\n sys.stderr.write('%s%s%s%s' % (color, message, ANSI_NORMAL, newline))", "def errordie(message):\n prog = os.path.basename(sys.argv[0])\n sys.stderr.write(\"{}: error: {}\\n\".format(prog, message))\n sys.exit(1)", "def emit_message(message, stream=None):\n if stream is None:\n stream = sys.stderr\n stream.write(\"%(message)s\\n\" % vars())\n stream.flush()", "def err(message):\n\n timestamp = format_time(get_time())\n message = '{} - [ERROR] - {}'.format(timestamp, message)\n _log_status(message)", "def writeError(errorMsg):\n stdout.write(errorMsg + '\\n')\n stdout.flush()\n return", "def print_err(self, *args):\r\n strings = []\r\n for arg in args:\r\n strings.append(str(arg))\r\n self.stderr.write(\",\".join(strings))", "def show_error(title, message, print_message=False):\n\n pass", "def Message(msg, id=260, ok=None):\n sys.stderr.write(msg+'\\n')", "def stderr(self) -> str:\n _args: list[Arg] = []\n _ctx = self._select(\"stderr\", _args)\n return _ctx.execute_sync(str)", "def warn(msg):\n print(msg, file=sys.stderr)", "def warn(msg):\n print(msg, file=sys.stderr)", "def warn(msg):\n print(msg, file=sys.stderr)", "def warn(msg):\n print(msg, file=sys.stderr)", "def warn(msg):\n print(msg, file=sys.stderr)", "def warn(msg):\n print(msg, file=sys.stderr)", "def warn(msg):\n print(msg, file=sys.stderr)", "def warn(msg):\n print(msg, file=sys.stderr)", "def warn(msg):\n print(msg, file=sys.stderr)", "def warn(msg):\n print(msg, file=sys.stderr)", "def warn(msg):\n print(msg, file=sys.stderr)", "def warn(msg):\n print(msg, file=sys.stderr)", "def warn(msg):\n print(msg, file=sys.stderr)", "def warn(msg):\n print(msg, file=sys.stderr)", "def warn(msg):\n print(msg, file=sys.stderr)", "def warn(msg):\n print(msg, file=sys.stderr)", "def warn(msg):\n print(msg, file=sys.stderr)", "def warn(msg):\n print(msg, file=sys.stderr)", "def warn(msg):\n print(msg, file=sys.stderr)", "def printError(message):\n try:\n message = str(message)\n except Exception as e:\n print(f\"{Fore.RED}{str(ptime())}: [ERROR]{Style.RESET_ALL} {Fore.WHITE}\" + str(e) + Style.RESET_ALL)\n\n print(f\"{Fore.RED}{str(ptime())}: [ERROR]{Style.RESET_ALL} {Fore.WHITE}\" + message + Style.RESET_ALL)", "def shell_error(msg, exitcode=1):\n print(msg, file=sys.stderr)\n exit(exitcode)", "def 
error(s):\n sys.stderr.write(\"%s: %s\\n\" % (NAME, s))\n sys.exit(1)", "def displayStderr(self, test):\n test = proto_test(test)\n if test.dotted_name in self.stderr_errput:\n self.stream.write(\n \"\\n{} for {}\\n{}\".format(\n self.colors.yellow(\"Captured stderr\"),\n self.colors.bold(test.dotted_name),\n self.stderr_errput[test],\n )\n )\n del self.stderr_errput[test]", "def write_err(self, text): # pragma: no cover\n # type: (str) -> None\n stderr = self.stderr\n if self.stderr.closed:\n stderr = sys.stderr\n stderr.write(decode_output(u\"\\r\", target_stream=stderr))\n stderr.write(decode_output(CLEAR_LINE, target_stream=stderr))\n if text is None:\n text = \"\"\n text = decode_output(u\"{0}\\n\".format(text), target_stream=stderr)\n self.stderr.write(text)\n self.out_buff.write(decode_output(text, target_stream=self.out_buff))", "def print_error(self, message: str=\"\", src_file: str=\"\") -> None:\n if self._verbosity_level >= int(VerbosityLevel.VERBOSITY_LEVEL1):\n _mes = src_file + \": \" + message\n if self._print_statements_enabled:\n print(\"ERROR \\t\\t- \", src_file + \": \\t\" + message)\n logging.error(_mes)", "def notify_error(self, error: str) -> None:\n print(error, file=sys.stderr)", "def notify_error(self, error: str) -> None:\n print(error, file=sys.stderr)", "def notify_error(self, error: str) -> None:\n print(error, file=sys.stderr)", "def VerboseOut(self, message):\n if self._verbose:\n self.StdErr(message, die=False)", "def print_error(*args):\n print_message_with_title('ERROR', *args, c1='r', c2=None, style='b')", "def print_diagnostics(self, logfile, message):\n\n color_stdout(message, schema='error')\n print_tail_n(logfile, 10)", "def error(message, exits=None): # pylint: disable=unused-argument\n print(crayons.red(fmt(message, \"[✗]\"), bold=True))\n sys.stdout.flush()", "def error(self, message):\n print message", "def print_messages(self):\n if self.messages:\n self.messages.append(\"\")\n sys.stderr.write(os.linesep.join(self.messages))", "def log(msg, *args):\n sys.stderr.write(msg.format(*args) + '\\n')", "def error(self, message: str) -> None:\n lines = message.split('\\n')\n linum = 0\n formatted_message = ''\n for line in lines:\n if linum == 0:\n formatted_message = 'Error: ' + line\n else:\n formatted_message += '\\n ' + line\n linum += 1\n\n self.print_usage(sys.stderr)\n\n # Format errors with style_warning()\n formatted_message = ansi.style_warning(formatted_message)\n self.exit(2, '{}\\n\\n'.format(formatted_message))", "def log_error(err):\n print(err)", "def error(self, message):\n sys.stderr.write(message[0].capitalize() + message[1:] + '\\n')\n sys.stderr.write('Use \"arhc.py --help\" to view more information.\\n')\n exit()", "def write(self, msg):\n\n self.clear()\n if not msg.endswith(\"\\n\"):\n sys.stderr.write(msg+\"\\n\")\n else:\n sys.stderr.write(msg)\n self.draw()", "def error(*args, **kwargs): # pragma: nocover\n kwargs['file'] = sys.stderr\n print(\"\\n\\tERROR:\", *args, **kwargs)\n if args and args[0].startswith(\"[Errno 2] No such file or directory\"):\n print(\"\\t(Did you forget to include an __init__.py?)\")\n sys.exit(1)" ]
[ "0.83818024", "0.83467203", "0.8323325", "0.8299432", "0.8288418", "0.8276263", "0.82736266", "0.81002593", "0.80394256", "0.79781455", "0.790646", "0.7891345", "0.7791808", "0.76625943", "0.75944436", "0.75564045", "0.7551808", "0.7516099", "0.75122887", "0.75074214", "0.74721956", "0.74709886", "0.74339306", "0.7357102", "0.73315644", "0.73143035", "0.73107547", "0.7307033", "0.73021644", "0.7252224", "0.7228068", "0.72243637", "0.7187593", "0.7185036", "0.7183317", "0.71702665", "0.71325445", "0.7132379", "0.71046925", "0.7029845", "0.70295763", "0.6956177", "0.6933999", "0.68957174", "0.68939334", "0.68867654", "0.68846273", "0.6800708", "0.67741567", "0.6767196", "0.67442816", "0.6736123", "0.6722827", "0.67193097", "0.67152554", "0.6694402", "0.66871005", "0.6683309", "0.6671081", "0.6666825", "0.6659692", "0.66595143", "0.66595143", "0.66595143", "0.66595143", "0.66595143", "0.66595143", "0.66595143", "0.66595143", "0.66595143", "0.66595143", "0.66595143", "0.66595143", "0.66595143", "0.66595143", "0.66595143", "0.66595143", "0.66595143", "0.66595143", "0.66595143", "0.66566974", "0.6620833", "0.6600925", "0.6599546", "0.6594797", "0.65892166", "0.6575363", "0.6575363", "0.6575363", "0.65645486", "0.6549572", "0.65471363", "0.65420985", "0.65242654", "0.65237594", "0.6505746", "0.6498745", "0.64929575", "0.6478753", "0.6473153", "0.6467684" ]
0.0
-1
Collect command-line arguments into a CertInfo.
def info_from_args(args):
    return CertInfo(
        subject=parse_dn(args.subject),
        usage=parse_list(args.usage),
        alt_names=parse_list(args.san),
        ocsp_nocheck=args.ocsp_nocheck,
        ocsp_must_staple=args.ocsp_must_staple,
        ocsp_must_staple_v2=args.ocsp_must_staple_v2,
        ocsp_urls=parse_list(args.ocsp_urls),
        crl_urls=parse_list(args.crl_urls),
        issuer_urls=parse_list(args.issuer_urls),
        permit_subtrees=parse_list(args.permit_subtrees),
        exclude_subtrees=parse_list(args.exclude_subtrees),
        ca=args.CA,
        path_length=args.path_length)
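A minimal, hypothetical sketch (not part of the source record) of an argparse parser that could feed `info_from_args()`. Every flag name, help string, and default below is an assumption inferred from the attribute accesses in the function above, and the sketch assumes `CertInfo`, `parse_dn`, and `parse_list` (helpers tolerant of `None`/empty input) are in scope:

```python
import argparse

def build_parser():
    # Each flag maps 1:1 onto an attribute that info_from_args() reads.
    p = argparse.ArgumentParser(description="Create a certificate request (sketch)")
    p.add_argument("--subject", help='Distinguished name, e.g. "/CN=example.com/O=Org/"')
    p.add_argument("--usage", help="Comma-separated key usages")
    p.add_argument("--san", help="Comma-separated subject alternative names")
    p.add_argument("--ocsp-nocheck", dest="ocsp_nocheck", action="store_true")
    p.add_argument("--ocsp-must-staple", dest="ocsp_must_staple", action="store_true")
    p.add_argument("--ocsp-must-staple-v2", dest="ocsp_must_staple_v2", action="store_true")
    p.add_argument("--ocsp-urls", dest="ocsp_urls", help="Comma-separated OCSP responder URLs")
    p.add_argument("--crl-urls", dest="crl_urls", help="Comma-separated CRL distribution URLs")
    p.add_argument("--issuer-urls", dest="issuer_urls", help="Comma-separated issuer certificate URLs")
    p.add_argument("--permit-subtrees", dest="permit_subtrees", help="Permitted name constraints")
    p.add_argument("--exclude-subtrees", dest="exclude_subtrees", help="Excluded name constraints")
    p.add_argument("--CA", action="store_true", help="Request a CA certificate")
    p.add_argument("--path-length", dest="path_length", type=int, default=None)
    return p

args = build_parser().parse_args()
cert_info = info_from_args(args)  # all parsing/splitting is delegated to the helpers
```

Keeping the flag-to-kwarg mapping one-to-one like this keeps `info_from_args()` a pure translation layer, so validation stays in `parse_dn`/`parse_list` and the parser definition remains the single source of truth for the CLI surface.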
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _set_arguments(self):\n cert_location = f\"dependencies{sep}certificates{sep}localuser.crt\"\n key_location = f\"dependencies{sep}certificates{sep}localuser.key\"\n assert Path(cert_location).exists(), (\n f\"The certificate isn't \"\n f\"present at location {Path(cert_location).absolute()}\"\n )\n assert Path(key_location).exists(), (\n f\"The certificate key isn't \"\n f\"present at location {Path(key_location).absolute()}\"\n )\n self._arguments = [\n (\n \"test-certificate-verify\",\n [\"-k\", key_location, \"-c\", cert_location],\n ),\n (\n \"test-sig-algs\",\n [],\n ),\n (\n \"test-clienthello-md5\",\n [],\n ),\n (\n \"test-tls13-pkcs-signature\",\n [],\n ),\n ]", "def add_certificate_arguments(parser):\n group = parser.add_argument_group(\"Certificate management\")\n group.add_argument(\n \"-sn\", \"--serial_number\",\n help=\"Serial number for the certificate\",\n type=int,\n default=1\n )\n group.add_argument(\n \"-d\", \"--duration\",\n help=\"Period of validity for certificate (seconds)\",\n type=int,\n default=60*60*24*(365*100+25)\n )", "def _ParseCertificateArguments(client, args):\n self_managed = None\n managed = None\n certificate_type = None\n if args.certificate:\n certificate_type = \\\n client.messages.SslCertificate.TypeValueValuesEnum.SELF_MANAGED\n certificate = files.ReadFileContents(args.certificate)\n private_key = files.ReadFileContents(args.private_key)\n self_managed = client.messages.SslCertificateSelfManagedSslCertificate(\n certificate=certificate, privateKey=private_key)\n if args.domains:\n certificate_type = \\\n client.messages.SslCertificate.TypeValueValuesEnum.MANAGED\n managed = client.messages.SslCertificateManagedSslCertificate(\n domains=args.domains)\n return certificate_type, self_managed, managed", "def get_cli_arguments(self):\n pass", "def _Args(parser,\n include_l7_internal_load_balancing=False,\n support_managed_certs=False):\n parser.add_argument(\n '--description',\n help='An optional, textual description for the SSL certificate.')\n\n parser.display_info.AddCacheUpdater(\n flags.SslCertificatesCompleterBeta\n if include_l7_internal_load_balancing else flags.SslCertificatesCompleter)\n\n if support_managed_certs:\n managed_or_not = parser.add_group(\n mutex=True,\n required=True,\n help='Flags for managed or self-managed certificate. ')\n\n managed_or_not.add_argument(\n '--domains',\n metavar='DOMAIN',\n type=arg_parsers.ArgList(min_length=1),\n default=[],\n help=\"\"\"\\\n List of domains to create a managed certificate for.\n \"\"\")\n\n not_managed = managed_or_not.add_group('Flags for self-managed certificate')\n not_managed.add_argument(\n '--certificate',\n metavar='LOCAL_FILE_PATH',\n required=True,\n help=\"\"\"\\\n Path to a local certificate file to create a self-managed\n certificate. The certificate must be in PEM format. The certificate\n chain must be no greater than 5 certs long. The chain must include at\n least one intermediate cert.\n \"\"\")\n not_managed.add_argument(\n '--private-key',\n metavar='LOCAL_FILE_PATH',\n required=True,\n help=\"\"\"\\\n Path to a local private key file. The private key must be in PEM\n format and must use RSA or ECDSA encryption.\n \"\"\")\n else:\n parser.add_argument(\n '--certificate',\n required=True,\n metavar='LOCAL_FILE_PATH',\n help=\"\"\"\\\n Path to a local certificate file. The certificate must be in PEM\n format. The certificate chain must be no greater than 5 certs long. 
The\n chain must include at least one intermediate cert.\n \"\"\")\n\n parser.add_argument(\n '--private-key',\n required=True,\n metavar='LOCAL_FILE_PATH',\n help=\"\"\"\\\n Path to a local private key file. The private key must be in PEM\n format and must use RSA or ECDSA encryption.\n \"\"\")", "def command_line_arguments():\n\n try:\n parser = argparse.ArgumentParser(description='Log Handler/Cleaner/Copier for Idemia DocAuth')\n\n # Add required arguments.\n parser.add_argument('action', choices=['clean', 'download'], type=str, help='clean or download')\n\n # Parse the arguments\n args = parser.parse_args()\n\n return args\n\n except Exception as err:\n print(err)\n return", "def main():\n licensify(_parse_args())", "def _parse_cpachecker_args(cpachecker_args):\n\n class Run:\n options = []\n identifier = None\n sourcefiles = []\n propertyfile = None\n\n run = Run()\n run.identifier = cpachecker_args\n\n i = iter(cpachecker_args)\n while True:\n try:\n option = next(i)\n if len(option) == 0:\n continue # ignore empty arguments\n\n if option in [\n \"-heap\",\n \"-timelimit\",\n \"-entryfunction\",\n \"-spec\",\n \"-config\",\n \"-setprop\",\n ]:\n run.options.append(option)\n run.options.append(next(i))\n\n elif option[0] == \"-\":\n run.options.append(option)\n\n else:\n run.sourcefiles.append(option)\n\n except StopIteration:\n break\n\n return run", "def main():\n ssl_date_fmt = r'%b %d %H:%M:%S %Y %Z'\n #cert_file_name = os.path.join(os.path.dirname(__file__), \"testcert.pem\")\n\n parser = argparse.ArgumentParser(description='Parse a certificate and show days left')\n parser.add_argument('-v', '--verbose', action='store_true', help='show full certificate')\n parser.add_argument('cert', nargs='+', help='certifcate file(s)')\n args = parser.parse_args()\n for cert_file_name in args.cert:\n try:\n cert_dict = ssl._ssl._test_decode_cert(cert_file_name)\n serial = cert_dict['serialNumber']\n subject = dict(x[0] for x in cert_dict['subject'])\n issued_to = subject['commonName']\n time_left = datetime.datetime.strptime(cert_dict['notAfter'], ssl_date_fmt) - datetime.datetime.utcnow()\n if args.verbose:\n pp(cert_dict)\n ssl_expires_in(issued_to, serial, time_left)\n\n except Exception as error:\n print(\"Error decoding certificate: {:}\".format(error))", "def get_args():\n if len(sys.argv) == 3:\n return sys.argv[1:]\n print(\"USAGE: python3 extract_cds.py infile outfile\\n\\n\")\n exit()", "def __check_opts(self):\n self.ca_cert_file = os.environ['HOME'] + '/.cat_installer/ca.pem'\n self.pfx_file = os.environ['HOME'] + '/.cat_installer/user.p12'\n if not os.path.isfile(self.ca_cert_file):\n print(Messages.cert_error)\n sys.exit(2)", "def GenerateToolArgStrings(options):\n # Preparing dnstreexport\n dnstreeexport_array = [options.tree_export]\n dnstreeexport_array.extend(['-c', options.config_file])\n if( options.force ):\n dnstreeexport_array.append('--force')\n if( options.quiet ):\n dnstreeexport_array.append('--quiet')\n dnstreeexport_arg_string = ' '.join(dnstreeexport_array)\n\n # Preparing dnscheckconfig\n dnscheckconfig_array = [options.check_config]\n dnscheckconfig_array.extend(['-i', '%s' % options.id])\n dnscheckconfig_array.extend(['--config-file', options.config_file])\n if( options.named_checkzone ):\n dnscheckconfig_array.extend(['-z', options.named_checkzone])\n if( options.named_checkconf ):\n dnscheckconfig_array.extend(['-c', options.named_checkconf])\n if( not options.quiet ):\n dnscheckconfig_array.append('-v')\n dnscheckconfig_arg_string = ' 
'.join(dnscheckconfig_array)\n\n # Preparing dnsservercheck\n dnsservercheck_array = [options.server_check]\n dnsservercheck_array.extend(['--export-config'])\n dnsservercheck_array.extend(['-c', options.config_file])\n dnsservercheck_array.extend(['-i', '%s' % options.id])\n dnsservercheck_arg_string = ' '.join(dnsservercheck_array)\n\n # Preparing dnsconfigsync\n dnsconfigsync_array = [options.config_sync]\n dnsconfigsync_array.extend(['--export-config'])\n dnsconfigsync_array.extend(['-i', '%s' % options.id])\n dnsconfigsync_array.extend(['-c', options.config_file])\n if( options.ssh_id ):\n dnsconfigsync_array.extend(['--ssh-id', options.ssh_id])\n if( options.rndc_exec ):\n dnsconfigsync_array.extend(['--rndc-exec', options.rndc_exec])\n if( options.rndc_port ):\n dnsconfigsync_array.extend(['--rndc-port', options.rndc_port])\n if( options.rndc_key ):\n dnsconfigsync_array.extend(['--rndc-key', options.rndc_key])\n if( options.rndc_conf ):\n dnsconfigsync_array.extend(['--rndc-conf', options.rndc_conf])\n dnsconfigsync_arg_string = ' '.join(dnsconfigsync_array)\n\n # Preparing dnsquerycheck\n dnsquerycheck_array = [options.query_check]\n dnsquerycheck_array.extend(['--export-config'])\n dnsquerycheck_array.extend(['-c', options.config_file])\n dnsquerycheck_array.extend(['-i', '%s' % options.id])\n dnsquerycheck_array.extend(['-n', '%s' % options.number])\n dnsquerycheck_array.extend(['-p', '%s' % options.port])\n dnsquerycheck_arg_string = ' '.join(dnsquerycheck_array)\n\n return [dnstreeexport_arg_string,\n dnscheckconfig_arg_string,\n dnsservercheck_arg_string,\n dnsconfigsync_arg_string, \n dnsquerycheck_arg_string]", "def cli_arguments():\n\n parser = argparse.ArgumentParser(\n formatter_class=argparse.RawDescriptionHelpFormatter,\n usage=f\"\\n{Color.DETAIL}pdforce.py [-p <pdf>] [-w <wordlist>] [-e <encoding>] [-o <output>] [-c] [-h/--help]{Color.END}\",\n description=f\"{Color.EMPHASIS}{TITLE}\\nLightweight PDF password cracker. USE FOR LEGAL INTENTS ONLY.{Color.END}\",\n epilog=f\"{Color.EMPHASIS}Made by @poponealex - https://github.com/poponealex{Color.END}\",\n )\n\n parser.add_argument(\n \"-p\",\n \"--pdf\",\n type=str,\n help=f\"{Color.INFORMATION}Path to the pdf file.{Color.END}\",\n action=\"store\",\n default=\"\",\n )\n\n parser.add_argument(\n \"-w\",\n \"--wordlist\",\n type=str,\n help=f\"{Color.INFORMATION}Path to the wordlist.{Color.END}\",\n action=\"store\",\n default=\"\",\n )\n\n parser.add_argument(\n \"-e\",\n \"--encoding\",\n type=str,\n help=f\"{Color.INFORMATION}Specify an encoding for the wordlist (https://docs.python.org/3/library/codecs.html#standard-encodings). The default encoding is platform dependent. Use 'iso8859_1' for rockyou. {Color.END}\",\n action=\"store\",\n default=None,\n )\n\n parser.add_argument(\n \"-o\",\n \"--output\",\n help=f\"{Color.INFORMATION}Output the cracked password to a new file.{Color.END}\",\n action=\"store\",\n )\n\n parser.add_argument(\n \"-c\",\n \"--copy\",\n help=f\"{Color.INFORMATION}Copy the password to the clipboard.{Color.END}\",\n action=\"store_true\",\n )\n\n return parser.parse_args()", "def commandline_options(args):\n # -------------------------------------------------------------------------------\n parser = argparse.ArgumentParser(\n description=\"Query and parse the caseroot files to gather metadata information\"\n \" that can be posted to the CESM experiments database.\"\n \" \"\n \" CMIP6 experiment case names must be reserved already in the\"\n \" experiment database. 
Please see:\"\n \" https://csesgweb.cgd.ucar.edu/expdb2.0 for details.\"\n )\n\n CIME.utils.setup_standard_logging_options(parser)\n\n parser.add_argument(\n \"--user\",\n dest=\"user\",\n type=str,\n default=None,\n required=True,\n help=\"User name for SVN CESM developer access (required)\",\n )\n\n parser.add_argument(\n \"--password\",\n dest=\"password\",\n action=PasswordPromptAction,\n default=\"\",\n required=True,\n help=\"Password for SVN CESM developer access (required)\",\n )\n\n parser.add_argument(\n \"--caseroot\",\n nargs=1,\n required=False,\n help=\"Fully quailfied path to case root directory (optional). \"\n \"Defaults to current working directory.\",\n )\n\n parser.add_argument(\n \"--workdir\",\n nargs=1,\n required=False,\n help=\"Fully quailfied path to directory for storing intermediate \"\n \"case files. A sub-directory called \"\n \"archive_temp_dir is created, populated \"\n \"with case files, and posted to the CESM experiments database and \"\n 'SVN repository at URL \"{0}\". '\n \"This argument can be used to archive a caseroot when the user \"\n \"does not have write permission in the caseroot (optional). \"\n \"Defaults to current working directory.\".format(_svn_expdb_url),\n )\n\n parser.add_argument(\n \"--expType\",\n dest=\"expType\",\n nargs=1,\n required=True,\n choices=_exp_types,\n help=\"Experiment type. For CMIP6 experiments, the case must already \"\n \"exist in the experiments database at URL \"\n ' \"http://csegweb.cgd.ucar.edu/expdb2.0\" (required). '\n 'Must be one of \"{0}\"'.format(_exp_types),\n )\n\n parser.add_argument(\n \"--title\",\n nargs=1,\n required=False,\n default=None,\n help=\"Title of experiment (optional).\",\n )\n\n parser.add_argument(\n \"--ignore-logs\",\n dest=\"ignore_logs\",\n action=\"store_true\",\n help=\"Ignore updating the SVN repository with the caseroot/logs files. \"\n \"The experiments database will be updated (optional).\",\n )\n\n parser.add_argument(\n \"--ignore-timing\",\n dest=\"ignore_timing\",\n action=\"store_true\",\n help=\"Ignore updating the the SVN repository with caseroot/timing files.\"\n \"The experiments database will be updated (optional).\",\n )\n\n parser.add_argument(\n \"--ignore-repo-update\",\n dest=\"ignore_repo_update\",\n action=\"store_true\",\n help=\"Ignore updating the SVN repository with all the caseroot files. \"\n \"The experiments database will be updated (optional).\",\n )\n\n parser.add_argument(\n \"--add-files\",\n dest=\"user_add_files\",\n required=False,\n help=\"Comma-separated list with no spaces of files or directories to be \"\n \"added to the SVN repository. These are in addition to the default added \"\n \"caseroot files and directories: \"\n '\"{0}, *.xml, user_nl_*\" (optional).'.format(_archive_list),\n )\n\n parser.add_argument(\n \"--dryrun\",\n action=\"store_true\",\n help=\"Parse settings and print what actions will be taken but \"\n \"do not execute the action (optional).\",\n )\n\n parser.add_argument(\n \"--query_cmip6\",\n nargs=2,\n required=False,\n help=\"Query the experiments database global attributes \"\n \"for specified CMIP6 casename as argument 1. \"\n \"Writes a json formatted output file, specified by argument 2, \"\n \"to subdir archive_files (optional).\",\n )\n\n parser.add_argument(\n \"--test-post\",\n dest=\"test_post\",\n action=\"store_true\",\n help=\"Post metadata to the test expdb2.0 web application server \"\n 'at URL \"http://csegwebdev.cgd.ucar.edu/expdb2.0\". 
'\n \"No --test-post argument defaults to posting metadata to the \"\n \"production expdb2.0 web application server \"\n 'at URL \"http://csegweb.cgd.ucar.edu/expdb2.0\" (optional).',\n )\n\n opts = CIME.utils.parse_args_and_handle_standard_logging_options(args, parser)\n\n return opts", "def read_arguments(argv):\n\tif argv[0] in ('1', '2'):\n\t\tconos_config['endpoint'] = endpoint[argv[0]]\n\telse:\n\t\tusage()\n\n\tif argv[1] in ('dev', 'test', 'int', 'prod'):\n\t\tconos_config['environment'] = argv[1]\n\t\tconos_config['sts_url'] = eval(argv[1] + '_sts_url')\n\t\tconos_config['aicuu_url'] = eval(argv[1] + '_aicuu_url')\n\telse:\n\t\tusage()\n\n\tif len(argv) == 6:\n\t\tconos_config['number_threads'] = '1'\n\telse:\n\t\tif argv[6] in ('1', '2', '3', '4', '5', '6', '7', '8'):\n\t\t\tconos_config['number_threads'] = argv[6]\n\t\telse:\n\t\t\tusage()\n\n\tconos_config['client_id'] = argv[2]\n\tconos_config['client_secret'] = argv[3]\n\tconos_config['input_file'] = argv[4]\n\tconos_config['output_file'] = argv[5]", "def parse_arguments(self):\n \n for arg in sys.argv[1:]:\n (key, sep, value) = arg.partition(\"=\")\n if sep != \"=\":\n raise ProcessorError(\"Illegal argument '%s'\" % arg)\n self.update_data(key, value)", "def GetArgs():\n\n parser = argparse.ArgumentParser(description='Process args for connecting to vCenter')\n parser.add_argument('-v', '--vc', required=True, action='store', help='vCenter')\n parser.add_argument('-u', '--user', required=True, action='store', help='vCenter Administrator')\n parser.add_argument('-p', '--password', required=False, action='store', help='Password')\n args = parser.parse_args()\n return args", "def prepare_args(self):\n args = []\n if self.login:\n args.extend(['-L', cfg['tools.hydra.loginfile']])\n if self._port.is_ipv6:\n args.append('-6')\n\n args.extend(['-P', cfg['tools.hydra.passwordfile'], '-s', str(self._port.number), str(self._port.node.ip),\n self.service, ])\n return args", "def process_command_line_arguments() -> Namespace:\n\n parser = build_parser()\n arguments = parser.parse_args()\n\n return arguments", "def init(args: Optional[List[bytes]] = None) -> None:\n warnings.warn(_deprecation_warning(), FutureWarning)\n parsed = {}\n if args:\n for arg in args:\n kv = arg.decode().split('=')\n if len(kv) == 2:\n parsed[kv[0]] = kv[1]\n collective.init(**parsed)", "def get_args():\n\n parser = argparse.ArgumentParser(description=\"Get DC, Clusters, Hosts and VM in JSON.\")\n parser.add_argument('-H', '--host', nargs=1, required=True, help='The vCenter to connect to',\n dest='host', type=str)\n parser.add_argument('-p', '--password', nargs=1, required=False,\n help='The password with which to connect to the VC. If not specified, the user is prompted at runtime for a password',\n dest='password', type=str)\n parser.add_argument('-u', '--user', nargs=1, required=True, help='The username with which to connect to the host',\n dest='username', type=str)\n args = parser.parse_args()\n return args", "def convert_dial_attrs_args(attrs, args):\n if attrs == None:\n attrs = {}\n attrs_list = [\"%s=%s\" % (k, v) for k, v in attrs.items()]\n if args == None:\n args = []\n c_attrs = list_of_strings_to_c_string_array(list(attrs_list)+[None])\n c_argv = list_of_strings_to_c_string_array(list(args)+[None])\n return c_attrs, c_argv", "def setup_cl_args(cls, parser):\n\n parser.add_argument(\n \"spec\", \n nargs=\"?\", \n default=\"\",\n help=\"Print info for this ptask spec. First checks relative to \" + \\\n \"the currently set ptask. 
If no match is found, checks \" + \\\n \"relative to the project root.\",\n )\n\n parser.add_argument(\n \"-v\", \"--versions\",\n dest=\"versions\",\n nargs=\"*\",\n default=[],\n help=\"Show subscriptions for the supplied verisons. Default \" + \\\n \"is current. A list of integers can be supplied for \" + \\\n \"specific versions, or 'all' for all versions.\"\n )", "def parse_arguments(args):", "def getCommandLineArgs():\n parser = argparse.ArgumentParser(prog=\"ccvalidator\",\n description=\"Validate and determine the issuer of a given credit card number\")\n parser.add_argument(\"card_num\", help=\"Credit card number\")\n\n return parser.parse_args()", "def main(cli_args):\n store_obj = cert_human_py3.CertChainStore.from_socket(\n host=cli_args.host, port=cli_args.port\n )\n\n print(store_obj.dump_json)", "def req_command(args):\n if args.files:\n die(\"Unexpected positional arguments\")\n\n subject_info = info_from_args(args)\n\n if subject_info.ca:\n msg('Request for CA cert')\n else:\n msg('Request for end-entity cert')\n subject_info.show(msg_show)\n\n # Load private key, create signing request\n key = load_key(args.key, load_password(args.password_file))\n req = create_x509_req(key, subject_info)\n do_output(req_to_pem(req), args, 'req')", "def extract_info_from_arguments(self):\r\n\r\n for sample_name in self.arguments['--sample_name']:\r\n self.list_of_samples_to_be_combined.append(sample_name)\r\n\r\n for file_path in self.arguments['--input_file']:\r\n file_object = Input_file(file_path, self.list_of_samples_to_be_combined)\r\n self.indices.update(file_object.indices)\r\n self.list_of_input_files.append(file_object)\r\n self.list_of_input_files_paths.append(file_path)\r\n\r\n if self.arguments['--out']:\r\n if self.arguments['--output_format'] == 'COMPRESSED':\r\n self.compressed = True\r\n elif self.arguments['--output_format'] == 'UNCOMPRESSED':\r\n self.compressed = False\r\n else:\r\n if self.list_of_input_files[0].compressed:\r\n self.compressed = True\r\n else:\r\n self.compressed = False", "def show_command(args):\n for fn in args.files:\n ext = os.path.splitext(fn)[1].lower()\n if ext == '.csr':\n cmd = ['openssl', 'req', '-in', fn, '-text']\n elif ext == '.crt':\n cmd = ['openssl', 'x509', '-in', fn, '-text']\n else:\n die(\"Unsupported file: %s\", fn)\n subprocess.check_call(cmd)", "def get_args_from_console(args):\n return {\n \"cleaning_policy\": args.cleaning_policy,\n \"clear\": args.clear,\n \"content\": args.content,\n \"dry_run\": args.dry_run,\n \"force\": args.force,\n \"in_lines\": args.in_lines,\n \"max_size\": args.max_size,\n \"regex\": args.regex,\n \"restore\": args.restore,\n \"rmdir\": args.rmdir,\n \"short\": args.short,\n \"silent\": args.silent,\n \"storage_time\": args.storage_time,\n \"wastebasket_path\": args.wastebasket_path\n }", "def get_args():\n parser = argparse.ArgumentParser(\n description='Arguments for talking to vCenter')\n\n parser.add_argument('-s', '--host',\n required=True,\n action='store',\n help='vSpehre service to connect to')\n\n parser.add_argument('-o', '--port',\n type=int,\n default=443,\n action='store',\n help='Port to connect on')\n\n parser.add_argument('-u', '--user',\n required=True,\n action='store',\n help='Username to use')\n\n parser.add_argument('-p', '--password',\n required=False,\n action='store',\n help='Password to use')\n\n parser.add_argument('-v', '--vm-name',\n required=True,\n action='store',\n help='Name of the VM you wish to operate on')\n\n parser.add_argument('--no-ssl',\n action='store_true',\n 
help='Skip SSL verification')\n\n parser.add_argument('--operation',\n required=True,\n action='store',\n help='start, suspend, or stop')\n\n parser.add_argument('-f', '--force',\n required=False,\n action='store',\n default=None)\n \n args = parser.parse_args()\n\n if not args.password:\n args.password = getpass.getpass(\n prompt='Enter password')\n\n return args", "def setup_cmd_args():\n parser = argparse.ArgumentParser(description=\"This program will query G-POD and COPHUB on the same datasets, in order to obtain the number of data results, compare them compile a report with the differences.\", formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n # parser.add_argument(\"root_dir\", help=\"The root directory containing data to check\")\n # parser.add_argument(\"--workspace\", help=\"Set Workspace manually\")\n parser.add_argument(\"--outputlist\", help=\"Folder to write the output lists with the un-synced products.\", default=\"c:\\\\temp\\\\\")\n parser.add_argument(\"--daysback\", help=\"Report with a given number of days back from today\", default=0)\n parser.add_argument(\"--dataset\", help=\"Set which dataset to query (chose S3A_SR_1_SRA_A_PREOPS or S3B_SR_1_SRA_A_NTC)\")\n parser.add_argument(\"--startdate\", help=\" The Start Date (format: YYYY-MM-DD) \", default=\"2016-06-01\")\n parser.add_argument(\"--enddate\",help=\" The End Date (format: YYYY-MM-DD)\")\n parser.add_argument(\"--cphubuser\",help=\"COPHUB username\", required=True)\n parser.add_argument(\"--cphubpw\",help=\"COPHUB password\", required=True)\n parser.add_argument(\"-email\", type=str, help=\"Email to send the results\", action=\"append\")\n parser.add_argument('-t', action='store_true', help=\"Today as enddate. Otherwise the last day of the previous month is considered.\")\n parser.add_argument('-n', action='store_true', help=\"Normal numeric check\")\n parser.add_argument('-m', action='store_true', help=\"Monthly check with product listing.\")\n return parser.parse_args()", "def arguments(**kw):\n return export_arguments('cc', _all_arguments, _groups, **kw)", "def handle_cmdline_args():\n\n parser = argparse.ArgumentParser(\n description='Generate synthetic data from a specification in a json '\n 'file using the \"synth-method\" described in the json file. ')\n\n parser.add_argument(\n '-i', dest='infile', required=True,\n help='The input json file. Must contain a \"synth-method\" property')\n\n parser.add_argument(\n '-o', dest='outfile_prefix', required=True, help='The prefix of the output paths (data json and csv), relative to the QUIPP-pipeline root directory')\n\n args = parser.parse_args()\n return args", "def define_options(self):\n\n from clinica.engine.cmdparser import PIPELINE_CATEGORIES\n\n clinica_comp = self._args.add_argument_group(PIPELINE_CATEGORIES['CLINICA_COMPULSORY'])\n clinica_comp.add_argument(\"caps_directory\",\n help='Path to the CAPS directory.')\n clinica_comp.add_argument(\"list_bvalues\", type=str,\n help='String listing all the shells (i.e. 
the b-values) in the corrected DWI datasets comma separated (e.g, 0,300,700,2200)')\n # Optional arguments\n clinica_opt = self._args.add_argument_group(PIPELINE_CATEGORIES['CLINICA_OPTIONAL'])\n\n clinica_opt.add_argument(\"-wd\", \"--working_directory\",\n help='Temporary directory to store pipeline intermediate results')\n clinica_opt.add_argument(\"-np\", \"--n_procs\", type=int, default=4,\n help='Number of cores used to run in parallel')\n clinica_opt.add_argument(\"-tsv\", \"--subjects_sessions_tsv\",\n help='TSV file containing a list of subjects with their sessions.')", "def get_argdict(cls, toolchain, args):\n return {} # Empty must be overloaded (if required)", "def command_line_start(argv, program_name):\n cl_parser = argparse.ArgumentParser(description='Tinkerforge Data Logger')\n\n cl_parser.add_argument('config_file', help=\"Path to the configuration file\")\n cl_parser.add_argument('-v', action=\"store_true\", dest=\"validate\",\n help=\"Just process the validation of the configuration file\")\n\n results = cl_parser.parse_args(argv)\n\n arguments_map = {}\n arguments_map[CONSOLE_CONFIG_FILE] = results.config_file\n arguments_map[CONSOLE_VALIDATE_ONLY] = results.validate\n\n return arguments_map", "def process_cmdline_args():\n parser = argparse.ArgumentParser(description='create s3 account using s3cipher generated keys')\n subparsers = parser.add_subparsers(dest='action')\n create_bg_acc = subparsers.add_parser('CreateBGDeleteAccount', help='Create background delete service account')\n\n create_bg_acc.add_argument('--ldapuser', help='sgiam ldap user name', type=str, required=True)\n create_bg_acc.add_argument('--ldappasswd', help='sgiam ldap user password', type=str, required=True)\n\n args = parser.parse_args()\n\n try:\n if args.action in g_supported_ldap_action_table.keys():\n action_obj = LdapAccountAction(args.ldapuser, args.ldappasswd)\n if args.action == 'CreateBGDeleteAccount':\n action_obj.create_account(g_supported_ldap_action_table[args.action])\n\n result_dict = g_supported_ldap_action_table[args.action]\n action_obj.print_create_account_results(result_dict)\n except Exception as e:\n print(\"Exception : {}\".format(e))\n print(\"Traceback : {}\".format(traceback.format_exc()))\n parser.print_help()", "def add_args(self, parser):", "def initialise(self, args, environ):", "def parse_args():\n parser = argparse.ArgumentParser(\n description='''\n {nm}: TCP over TLS server to accept requests.\\n\n '''.format(nm=sys.argv[0]))\n parser.add_argument('-p',\n '--port',\n help='Server port to connect to, defaults to \"9999\".',\n required=False,\n default='9999')\n parser.add_argument('-c',\n '--cert',\n help='Server certificate file with path,'\n ' defaults to \"server.pem\" in current directory.',\n required=False,\n default='server.pem')\n parser.add_argument('-k',\n '--key',\n help='Server certificate key file with path,'\n ' defaults to \"server.key\" in current directory.',\n required=False,\n default='server.key')\n parser.add_argument('-ca',\n '--cert-auth',\n help='CA certificate file with path,'\n ' defaults to \"ca_cert.pem\" in current directory.',\n required=False,\n dest='ca_cert',\n default='ca_cert.pem')\n parser.add_argument('--log-level',\n help='Logger level, defaults to \"DEBUG\"',\n required=False,\n default='DEBUG')\n return vars(parser.parse_args())", "def _argsForSubprocess(self) -> list[str]:\n pass", "def build_cmdline():\n\tcmd=optparse.OptionParser(version=__version__)\n\tcmd.add_option('-c', '', dest='config_fname',type=\"string\", 
help='WHM/WHMCS configuration file', metavar=\"FILE\")\n\tcmd.add_option('-s', '', dest=\"whm_section\", type=\"string\", help=\"WHM server to use. Specify section name. eg: -s ds01\", metavar=\"SERVER\")\n\tcmd.add_option('','--search', action=\"store\", dest='search', type=\"string\", help=\"Search client by DNS domain name or cPanel username\", metavar=\"STRING\")\n\tcmd.add_option('-d', '', dest='whmcs_deptid', type=\"int\", help=\"WHMCS Department ID\", metavar=\"INT\") \n\tcmd.add_option('-m', '', dest='whmcs_ticketmsg_fname', type=\"string\", help=\"WHMCS abuse ticket template file\", metavar='FILE')\n\tcmd.add_option('-r', '', dest='whm_suspendmsg_fname', type=\"string\", help='cPanel account suspension reason template file', metavar='FILE')\n\tcmd.add_option('-f', '', dest='whmcs_proofmsg_fname', type=\"string\", help='Abuse proof file which will be appended to abuse ticket message', metavar='FILE')\n\tcmd.add_option('', '--subject', dest='whmcs_subject', type=\"string\", help='Specify abuse ticket subject title.', metavar=\"STRING\")\n\tcmd.add_option('-y', '--allyes', dest='allyes', action=\"store_true\", default=False, help='Assume yes as an answer to any question which would be asked')\n\treturn cmd", "def entrypoint_wrapper(argc, argv):\n list = [\"\"] * argc\n i = 0\n while i < argc:\n list[i] = rffi.charp2str(argv[i])\n i += 1\n return entrypoint(list)", "def getArgs():\n parser = argparse.ArgumentParser(\n description='Arguments for smtp server, creds, and input files')\n parser.add_argument('-s', '--host', required=False, action='store',\n help='Remote smtp server to connect use')\n parser.add_argument('--port', required=False, action='store',\n default='25',\n help='port for the smtp server')\n parser.add_argument('--subject', required=False, action='store',\n help='subject for email message')\n parser.add_argument('--sender', required=False, action='store',\n help='email address message will be sent as')\n parser.add_argument('-u', '--user', required=False, action='store',\n help='username/email for smtp')\n parser.add_argument('-p', '--password', required=False, action='store',\n help='Password to use for smtp')\n parser.add_argument('--prompt', required=False, action='store',\n help='Promt for password to use for smtp')\n parser.add_argument('--silent', required=False, action='store_true',\n help='supress output to screen')\n parser.add_argument('--test', required=False, action='store_true',\n help='Display resulting emails in stdout and do not send')\n parser.add_argument('--csvfile', required=False, action='store',\n help='Filename and path of csv file')\n parser.add_argument('--template', required=False, action='store',\n help='Filename and path of csv file')\n parser.add_argument('--config', required=False, action='store',\n help='config file with auth, server, and subject')\n\n args = parser.parse_args()\n if args.config:\n print(\"config\")\n configfile = open(args.config)\n config = json.load(configfile)\n for key, value in config.iteritems():\n print(\"key: \" + key + \" value: \" + value)\n parser.set_defaults(key=value)\n #args.key = value\n #print(key + \" \" + args.key)\n args = parser.parse_args()\n return args", "def _get_args(self):\n parser = ArgumentParser(\n description=\"Dynamically generates Snakefiles for data \"\n \"integration and machine learning pipelines.\"\n )\n\n parser.add_argument(\n \"-c\",\n \"--config\",\n help=(\n \"Configuration filepath. 
(Will look for file named config.yml \"\n \"in current working directory, if none specified.)\"\n ),\n )\n\n parser.add_argument(\n \"-r\",\n \"--run\",\n default=False,\n help=(\n \"Runs pipeline, in addition to generating Snakefile.\"\n ),\n )\n\n # convert command-line args to a dict and return\n args = parser.parse_args()\n\n args = dict(\n (k, v) for k, v in list(vars(args).items()) if v is not None\n )\n\n return args", "def collect_args() -> argparse.Namespace:\n parser = argparse.ArgumentParser()\n parser.add_argument('--config', help=\"Config file\", type=str, default=Path(__file__).parent / \"data/params.yaml\")\n args = parser.parse_args()\n return args", "def _parse_arguments(self, args):\r\n try:\r\n course_id = SlashSeparatedCourseKey.from_deprecated_string(args[0])\r\n filename = args[1]\r\n except InvalidKeyError:\r\n raise CommandError(\"Unparsable course_id\")\r\n except IndexError:\r\n raise CommandError(\"Insufficient arguments\")\r\n\r\n # If filename is '-' save to a temp file\r\n pipe_results = False\r\n if filename == '-':\r\n filename = mktemp()\r\n pipe_results = True\r\n\r\n return course_id, filename, pipe_results", "def setup_request_commandline() -> Request:\r\n parser = argparse.ArgumentParser()\r\n parser.add_argument(\"key\", help=\"The key to use when encrypting or \"\r\n \"decrypting. This needs to be of \"\r\n \"length 8, 16 or 24\")\r\n parser.add_argument(\"-s\", \"--string\", help=\"The string that needs to be \"\r\n \"encrypted or decrypted\")\r\n parser.add_argument(\"-f\", \"--file\", help=\"The text file that needs to be\"\r\n \"encrypted or decrypted\")\r\n parser.add_argument(\"-o\", \"--output\", default=\"print\",\r\n help=\"The output of the program. This is 'print' by \"\r\n \"default, but can be set to a file name as well.\")\r\n parser.add_argument(\"-m\", \"--mode\", default=\"en\",\r\n help=\"The mode to run the program in. If 'en' (default)\"\r\n \" then the program will encrypt, 'de' will cause \"\r\n \"the program to decrypt\")\r\n try:\r\n args = parser.parse_args()\r\n request = Request()\r\n request.encryption_state = CryptoMode(args.mode)\r\n request.data_input = args.string\r\n request.input_file = args.file\r\n request.output = args.output\r\n request.key = args.key\r\n print(request)\r\n return request\r\n except Exception as e:\r\n print(f\"Error! Could not read arguments.\\n{e}\")\r\n quit()", "def retrieve_args_dict():\n process_args = sys.argv[1:]\n dictionary = dict()\n for process_arg in process_args:\n splitted = process_arg.split(\":\")\n if len(splitted) > 1:\n key = splitted[0]\n value = \"\".join(splitted[1:])\n dictionary[key] = value\n return dictionary", "def build_args(self, project_update, private_data_dir, passwords):\n args = []\n if getattr(settings, 'PROJECT_UPDATE_VVV', False):\n args.append('-vvv')\n if project_update.job_tags:\n args.extend(['-t', project_update.job_tags])\n return args", "def setup_request_commandline() -> Request:\n parser = argparse.ArgumentParser()\n parser.add_argument(\"key\", help=\"The key to use when encrypting or \"\n \"decrypting. This needs to be of \"\n \"length 8, 16 or 24\")\n parser.add_argument(\"-s\", \"--string\", help=\"The string that needs to be \"\n \"encrypted or decrypted\")\n parser.add_argument(\"-f\", \"--file\", help=\"The text file that needs to be\"\n \"encrypted or decrypted\")\n parser.add_argument(\"-o\", \"--output\", default=\"print\",\n help=\"The output of the program. 
This is 'print' by \"\n \"default, but can be set to a file name as well.\")\n parser.add_argument(\"-m\", \"--mode\", default=\"en\",\n help=\"The mode to run the program in. If 'en' (default)\"\n \" then the program will encrypt, 'de' will cause \"\n \"the program to decrypt\")\n try:\n args = parser.parse_args()\n request = Request()\n request.encryption_state = CryptoMode(args.mode)\n request.data_input = args.string\n request.input_file = args.file\n request.output = args.output\n request.key = args.key\n print(request)\n return request\n except Exception as e:\n print(f\"Error! Could not read arguments.\\n{e}\")\n quit()", "def handle_cmdline():\n\n cmdline = ArgumentParser(init_args=['address', 'arch', 'file'],\n address_required=True, address_default=None,\n file_required=True,\n file_help='Flash or memory image to inspect',\n formatter_class=RawDescriptionHelpFormatter,\n usage=_USAGE, description=_DESCRIPTION, epilog=_EPILOG)\n\n cmdline.add_argument('--longhelp',\n choices=['Y', 'N'],\n default=None,\n help=_LONGHELP_TEXT)\n\n cmdline.add_argument('--autocomplete',\n choices=['Y', 'N'],\n default=None,\n help=_AUTOCOMPLETE_TEXT)\n\n cmdline.add_argument('--threshold',\n type=int,\n default=5,\n help='Minimum table size to report. Default: 5')\n\n cmdline.add_argument('--subcmds',\n action='store_true',\n default=False,\n help='Include sub-command tables in displayed results')\n\n cmdline.add_argument('--details',\n action='store_true',\n default=False,\n help='Display more detailed output')\n\n args = cmdline.parse_args()\n\n if args.longhelp is not None:\n args.longhelp = args.longhelp == 'Y'\n\n if args.autocomplete is not None:\n args.autocomplete = args.autocomplete == 'Y'\n\n return args", "def parse_arguments(args: list = None) -> Dict[str, str]:\n arg_parser = argparse.ArgumentParser(description=\"Console command to crypt \"\n \"and decrypt texts using \"\n \"classic methods. 
It also \"\n \"performs crypto attacks \"\n \"against those methods.\\n\",\n epilog=\"Follow cifra development at: \"\n \"<https://github.com/dante-signal31/cifra>\")\n cifra_subparsers = arg_parser.add_subparsers(help=\"Available modes\",\n dest=\"mode\",\n required=True)\n # DICTIONARY MANAGEMENT.\n dictionary_parser = cifra_subparsers.add_parser(name=\"dictionary\",\n help=\"Manage dictionaries to \"\n \"perform crypto attacks.\")\n dictionary_actions_subparser = dictionary_parser.add_subparsers(help=\"Action to perform.\",\n dest=\"action\")\n # DICTIONARY CREATION.\n dictionary_create_parser = dictionary_actions_subparser.add_parser(name=\"create\",\n help=\"Create a dictionary of unique words.\")\n dictionary_create_parser.add_argument(\"dictionary_name\",\n type=str,\n help=\"Name for the dictionary to create.\",\n metavar=\"NEW_DICTIONARY_NAME\")\n dictionary_create_parser.add_argument(\"-i\", \"--initial_words_file\",\n type=_check_is_file,\n help=\"Optionally you can load in the dictionary words located in a text file\",\n metavar=\"PATH_TO FILE_WITH_WORDS\")\n # DICTIONARY REMOVAL.\n dictionary_delete_parser = dictionary_actions_subparser.add_parser(name=\"delete\",\n help=\"Remove an existing dictionary.\")\n dictionary_delete_parser.add_argument(\"dictionary_name\",\n type=str,\n help=\"Name for the dictionary to delete.\",\n metavar=\"DICTIONARY_NAME_TO_DELETE\")\n # DICTIONARY UPDATING.\n dictionary_update_parser = dictionary_actions_subparser.add_parser(name=\"update\",\n help=\"Add words to an existing dictionary.\")\n dictionary_update_parser.add_argument(\"dictionary_name\",\n type=str,\n help=\"Name for the dictionary to update with additional words.\",\n metavar=\"DICTIONARY_NAME_TO_UPDATE\")\n dictionary_update_parser.add_argument(\"words_file\",\n type=_check_is_file,\n help=\"Pathname to a file with words to add to dictionary\",\n metavar=\"PATH_TO_FILE_WITH_WORDS\")\n # DICTIONARY LISTING.\n _ = dictionary_actions_subparser.add_parser(name=\"list\",\n help=\"Show existing dictionaries.\")\n # CIPHER MANAGEMENT.\n cipher_parser = cifra_subparsers.add_parser(name=\"cipher\",\n help=\"Cipher a text using a key.\")\n cipher_parser.add_argument(\"algorithm\",\n choices=CIPHERING_ALGORITHMS,\n type=str,\n help=\"Algorithm to use to cipher.\",\n metavar=\"ALGORITHM_NAME\")\n cipher_parser.add_argument(\"key\",\n type=str,\n help=\"Key to use to cipher.\",\n metavar=\"CIPHERING_KEY\")\n cipher_parser.add_argument(\"file_to_cipher\",\n type=_check_is_file,\n help=\"Path to file with text to cipher.\",\n metavar=\"FILE_TO_CIPHER\")\n cipher_parser.add_argument(\"-o\", \"--ciphered_file\",\n type=str,\n help=\"Path to output file to place ciphered text. 
If not used then\"\n \"ciphered text will be dumped to console.\",\n metavar=\"OUTPUT_CIPHERED_FILE\")\n cipher_parser.add_argument(\"-c\", \"--charset\",\n type=str,\n help=f\"Default charset is: {cifra.cipher.common.DEFAULT_CHARSET}, but you can set here \"\n f\"another.\",\n metavar=\"CHARSET\")\n # DECIPHERING MANAGEMENT\n decipher_parser = cifra_subparsers.add_parser(name=\"decipher\",\n help=\"Decipher a text using a key.\")\n decipher_parser.add_argument(\"algorithm\",\n choices=CIPHERING_ALGORITHMS,\n type=str,\n help=\"Algorithm to use to decipher.\",\n metavar=\"ALGORITHM_NAME\")\n decipher_parser.add_argument(\"key\",\n type=str,\n help=\"Key to use to decipher.\",\n metavar=\"CIPHERING_KEY\")\n decipher_parser.add_argument(\"file_to_decipher\",\n type=_check_is_file,\n help=\"Path to file with text to decipher.\",\n metavar=\"FILE_TO_DECIPHER\")\n decipher_parser.add_argument(\"-o\", \"--deciphered_file\",\n type=str,\n help=\"Path to output file to place deciphered text. If not used then\"\n \"deciphered text will be dumped to console.\",\n metavar=\"OUTPUT_DECIPHERED_FILE\")\n decipher_parser.add_argument(\"-c\", \"--charset\",\n type=str,\n help=f\"Default charset is: {cifra.cipher.common.DEFAULT_CHARSET}, but you can set here \"\n f\"another.\",\n metavar=\"CHARSET\")\n # ATTACK MANAGEMENT\n attack_parser = cifra_subparsers.add_parser(name=\"attack\",\n help=\"Attack a ciphered text to get its plain text\")\n attack_parser.add_argument(\"algorithm\",\n choices=CIPHERING_ALGORITHMS,\n type=str,\n help=\"Algorithm to attack.\",\n metavar=\"ALGORITHM_NAME\")\n attack_parser.add_argument(\"file_to_attack\",\n type=_check_is_file,\n help=\"Path to file with text to attack.\",\n metavar=\"FILE_TO_ATTACK\")\n attack_parser.add_argument(\"-o\", \"--deciphered_file\",\n type=str,\n help=\"Path to output file to place deciphered text. If not used then\"\n \"deciphered text will be dumped to console.\",\n metavar=\"OUTPUT_DECIPHERED_FILE\")\n attack_parser.add_argument(\"-c\", \"--charset\",\n type=str,\n help=f\"Default charset is: {cifra.cipher.common.DEFAULT_CHARSET}, but you can set here \"\n f\"another.\",\n metavar=\"CHARSET\")\n\n parsed_arguments = vars(arg_parser.parse_args(args))\n filtered_parser_arguments = {key: value for key, value in parsed_arguments.items()\n if value is not None}\n return filtered_parser_arguments", "def add_cipher_args(parser):\n parser.add_argument(\n \"--data\",\n \"-d\",\n help=\"Raw data to encrypt or decrypt. If not provided will be prompted.\"\n )\n\n parser.add_argument(\n \"--decrypt\",\n \"-D\",\n action=\"store_true\",\n default=False,\n help=\"When True will decrypt data. When False will encrypt data.\"\n )\n\n parser.add_argument(\n \"--encoding\",\n \"-e\",\n choices=ENCODING_CHOICES,\n default=ENCODING_DEFAULT,\n help=\"Encoding to apply to encrypted data or data when decrypting. Choices:{}\".format(\n ENCODING_CHOICES\n ),\n type=str.upper\n )\n\n parser.add_argument(\n \"--key\",\n \"-k\",\n help=\"Key used to encrypt or decrypt. If not provided will be prompted.\"\n )\n\n parser.add_argument(\n \"--mode\",\n \"-m\",\n choices=CIPHER_CHOICES,\n default=CIPHER_DEFAULT,\n help=\"Cipher to execute. 
Choices:{}\".format(CIPHER_CHOICES),\n type=str.upper\n )", "def parse_command_line_args():\n parser = argparse.ArgumentParser(description=(\n 'HYAKUYOBAKO Data sender.'))\n parser.add_argument(\n '--project_id', required=True, help='GCP cloud project name')\n parser.add_argument(\n '--registry_id', required=True, help='Cloud IoT Core registry id')\n parser.add_argument(\n '--device_id', required=True, help='Cloud IoT Core device id')\n parser.add_argument(\n '--private_key_file',\n required=True,\n help='Path to private key file.')\n parser.add_argument(\n '--algorithm',\n choices=('RS256', 'ES256'),\n required=True,\n help='The encryption algorithm to use to generate the JWT.')\n parser.add_argument(\n '--cloud_region', default='us-central1', help='GCP cloud region')\n parser.add_argument(\n '--ca_certs',\n default='roots.pem',\n help=('CA root from https://pki.google.com/roots.pem'))\n parser.add_argument(\n '--message_type',\n choices=('event', 'state'),\n default='event',\n required=True,\n help=('Indicates whether the message to be published is a '\n 'telemetry event or a device state message.'))\n parser.add_argument(\n '--base_url',\n default=_BASE_URL,\n help=('Base URL for the Cloud IoT Core Device Service API'))\n parser.add_argument(\n '--jwt_expires_minutes',\n default=20,\n type=int,\n help=('Expiration time, in minutes, for JWT tokens.'))\n parser.add_argument(\n '--id',\n default=999,\n type=int,\n help=('Device id, not IoT Core device id for unique key.'))\n parser.add_argument(\n '--location_logitude',\n default=0.0,\n type=float,\n help=('Logitude of this deice. ex)35.658581'))\n parser.add_argument(\n '--location_latitude',\n default=0.0,\n type=float,\n help=('Latitude of this deice. ex)139.745433'))\n\n return parser.parse_args()", "def prepare_arguments(self, parser):\n pass", "def options():\n\n parser = argparse.ArgumentParser(description=\"PlantCV Clowder image analysis script for the DDPSC indoor system.\",\n formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n #parser.add_argument(\"-v\", \"--vis\", help=\"Input VIS/RGB image.\", required=True)\n #parser.add_argument(\"-n\", \"--nir\", help=\"Input NIR image.\", required=True)\n #parser.add_argument(\"-p\", \"--perspective\", help=\"Camera perspective (side-view, top-view)\", required=True)\n parser.add_argument(\"-d\", \"--dataset\", help=\"Clowder Dataset key.\", required=True)\n parser.add_argument(\"-u\", \"--url\", help=\"Clowder URL.\", required=True)\n parser.add_argument(\"-U\", \"--username\", help=\"Clowder username.\", required=True)\n parser.add_argument(\"-p\", \"--password\", help=\"Clowder password.\", required=True)\n\n args = parser.parse_args()\n\n # if not os.path.exists(args.vis):\n # raise IOError(\"File does not exist: {0}\".format(args.vis))\n # if not os.path.exists(args.nir):\n # raise IOError(\"File does not exist: {0}\".format(args.nir))\n\n return args", "def parse_args():\n # Define what commandline arguments can be accepted\n parser = argparse.ArgumentParser()\n parser.add_argument(Flags.CSV_DIR,metavar=\"CSV_DIRECTORY\", type=check_str_is_dir,\n help=\"Source directory containing Digikey CSV files\")\n parser.add_argument(Flags.PDF_DIR,metavar=\"PDF_DIRECTORY\", type=check_str_is_dir,\n help=\"Directory to save the PDF datasheets to\")\n parser.add_argument('--csv_pages', dest=Flags.CSV_PAGES,metavar=\"NUM_PAGES\", type=int, default=1,\n help=\"How many 500-row pages to download from Digikey (default 1)\")\n parser.add_argument('--fv_code', 
dest=Flags.FV_CODE,metavar=\"FV_CODE\", default='ffe002af', #op-amp\n help=\"The FV code of the part family on Digikey (default op-amps)\")\n parser.add_argument('--encrypted', dest=Flags.KEEP_ENCRYPTED, action='store_true', default=False, help=\"Do not filter encrypted PDFs\")\n parser.add_argument('--skip_csv', dest=Flags.SKIP_CSV_DL, action='store_true', default=False, help=\"Do not redownload the CSV.\")\n parser.add_argument('--skip_pdf', dest=Flags.SKIP_PDF_DL, action='store_true', default=False, help=\"Do not redownload the PDFs.\")\n parser.add_argument('--ocr', dest=Flags.KEEP_OCR, action='store_true', default=False, help=\"Do not filter PDFs that need OCR\")\n parser.add_argument('--duplicates', dest=Flags.KEEP_DUPLICATES, action='store_true', default=False, help=\"Do not filter duplicate PDFs (NOT IMPLEMENTED)\")\n parser.add_argument('--version', action='version', version='%(prog)s 0.0.0')\n args = vars(parser.parse_args())\n\n # TODO (lwhsiao): We should also add option to automatically select a parameterized\n # number of files and organize as train/test/dev\n\n Flags.parsed_args = args\n return args", "def build_arguments(self, *cmd_args, **cmd_kwargs):\n args = []\n args.extend(cmd_args)\n\n for raw_key, value in cmd_kwargs.items():\n if len(raw_key) == 1:\n args.append('-{}'.format(raw_key))\n else:\n key = raw_key.replace('_', '-')\n args.append('--{}'.format(key))\n\n if value is True:\n # If True, it is enough.\n # e.g.: system=True translates to --system\n continue\n\n args.append(str(value))\n\n return args", "def Run(self, args):\n holder = base_classes.ComputeApiHolder(self.ReleaseTrack())\n client = holder.client\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, holder.resources, default_scope=compute_scope.ScopeEnum.GLOBAL)\n\n certificate = files.ReadFileContents(args.certificate)\n private_key = files.ReadFileContents(args.private_key)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n request = client.messages.ComputeRegionSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n region=ssl_certificate_ref.region,\n project=ssl_certificate_ref.project)\n collection = client.apitools_client.regionSslCertificates\n else:\n request = client.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n project=ssl_certificate_ref.project)\n collection = client.apitools_client.sslCertificates\n\n return client.MakeRequests([(collection, 'Insert', request)])", "def fill_args(cls, toolchain, parser):\n pass # pass must be overloaded (if required)", "def main():\n args = parse_args()\n process_args(args)", "def get_arguments():\n parser = argparse.ArgumentParser(description=\"Simple Jarvice CLI\",\n add_help=False)\n auth_group = parser.add_argument_group('auth', description='Configuration')\n auth_group.add_argument('-username', help='Jarvice username')\n auth_group.add_argument('-apikey', help='Jarvice API key')\n auth_group.add_argument('-apiurl', help='Jarvice API URL',\n default='https://api.jarvice.com')\n auth_group.add_argument('-v', help='loglevel',\n choices=['INFO', 'WARN', 'DEBUG', 'CRITICAL'],\n dest='loglevel', default='CRITICAL')\n auth_group.add_argument(\n 'command',\n choices=['connect', 'submit', 'info', 'status',\n 'action', 'terminate', 'shutdown', 'jobs',\n 'output', 'tail', 'apps', 'machines', 'summary',\n 'download', 'upload', 'wait_for', 'shutdown_all',\n 'terminate_all', 'ls'])\n\n known, unknown = parser.parse_known_args()\n return known, unknown, parser", "def definearguments(self, customparser):\n if not customparser:\n return\n customparser.add_option(\n '--url',\n dest='url',\n help=\"Use the provided iLO URL to login.\",\n default=None,\n )\n customparser.add_option(\n '-u',\n '--user',\n dest='user',\n help=\"If you are not logged in yet, including this flag along\"\\\n \" with the password and URL flags can be used to log into a\"\\\n \" server in the same command.\"\"\",\n default=None,\n )\n customparser.add_option(\n '-p',\n '--password',\n dest='password',\n help=\"\"\"Use the provided iLO password to log in.\"\"\",\n default=None,\n )\n customparser.add_option(\n '-e',\n '--enc',\n dest='encode',\n action='store_true',\n help=SUPPRESS_HELP,\n default=False,\n )", "def process_args(args, multiple_chrs=False, tool_name=\"\"):\r\n # Retrieves the dataset.\r\n dataset, chrom, tool = None, None, None\r\n\r\n dataset = DATASETS.get(args.dataset, None)\r\n if not dataset:\r\n print \"Unknown dataset %s.\" % args.dataset\r\n return dataset, chrom, tool\r\n dataset.set_work_dir(args.path)\r\n\r\n # Retreieves the Chromosome(s).\r\n if multiple_chrs:\r\n chrom = [dataset.get_chr(chr_num) for chr_num in args.chrs]\r\n else:\r\n chrom = dataset.get_chr(args.chr)\r\n\r\n # Retrieves the tool.\r\n if tool_name:\r\n full_name = get_tool(tool_name)\r\n if not full_name:\r\n return dataset, chrom, tool\r\n tool = TOOLS[full_name]\r\n tool.set_work_dir(args.path)\r\n\r\n return dataset, chrom, tool", "def get_cmd_args():\n\n\n\t#Creates the Argument Parser\n\tparser = ArgumentParser(description = \"ID Lab qPCR Analysis v\" + VERSION + \" \" + QUALITY)\n\n\t#Adds the input file argument\n\tparser.add_argument('-f', '--file',\n\t\t\t\tnargs = '+',\n\t\t\t\ttype = FileType('r'),\n\t\t\t\trequired = True)\n\n\t#Adds the output directory\n\tparser.add_argument('-o', '--output',\n\t\t\t\trequired = True)\n\n\t#Adds the model argument, to select between the three models\n\tparser.add_argument('-m', '--mod', '--model',\n\t\t\t\tnargs = '?',\n\t\t\t\tchoices = ['relative', 'absolute', 'stability'],\n\t\t\t\trequired = True)\n\n\t#Adds the control genes argument, taking a list of gene names\n\tparser.add_argument('-cg', '--cgenes', '--controlgenes',\n\t\t\t\tnargs = '+',\n\t\t\t\trequired = True)\n\n\t#Adds the optional control sample argument for the stability model, taking a list of sample names\n\tparser.add_argument('-cs', '--csample', '--controlsamples',\n\t\t\t\tnargs = '*')\n\n\t#Adds optional outlier cutoff\n\tparser.add_argument('-oc', '--ocutoff',\n\t\t\t\ttype = float,\n\t\t\t\tdefault = 0.3)\n\n\t#Adds optional max outliers\n\tparser.add_argument('-om', '--omax',\n\t\t\t\ttype = float,\n\t\t\t\tdefault = 0.5)\n\n\t#Adds optional encoding \n\tparser.add_argument('-e', '--encoding',\n\t\t\t\tdefault = 'ISO-8859-1')\n\n\t#Adds optional header size\n\tparser.add_argument('-hd', '--header',\n\t\t\t\tdefault = 47)\n\n\treturn vars(parser.parse_args())", "def get_args(): #{{{\n\n parser = argparse.ArgumentParser(\n description=\"Generate a passphrase.\",\n formatter_class=argparse.RawTextHelpFormatter)\n\n parser.add_argument(\"-n\", \"--num_words\",\n type=int, default=4,\n help=\"Number of words in the passphrase.\\n\" +\n \"Default : 4\")\n\n parser.add_argument(\"-w\", \"--wordlist\",\n type=str, default=\"passphra.se.txt\",\n help=\"Path to wordlist.\\n\" +\n \"Default : passphra.se.txt\\n\" +\n \"Note : Wordlist must have each word \" +\n \"separated by a newline.\")\n\n parser.add_argument(\"-m\", \"--max_length\",\n type=int, default=\"8\",\n help=\"Maximum length of a word.\\n\" +\n \"Default : 8\")\n\n parser.add_argument(\"-p\", \"--punctuation\",\n action=\"store_true\",\n help=\"Allow words with punctuation in them.\\n\" +\n \"Default : False\")\n \n parser.add_argument(\"-l\", \"--lowercase\",\n action=\"store_true\",\n help=\"Make the first letter of each word lowercase.\\n\" +\n \"Default : False\")\n\n parser.add_argument(\"-ns\", \"--no_space\",\n action=\"store_true\",\n help=\"No space between words.\\n\" +\n \"Default : False\")\n\n parser.add_argument(\"-np\", \"--num_phrases\",\n type=int, default=1,\n help=\"Number of passphrases to generate.\\n\" +\n \"Default : 1\")\n\n parser.add_argument(\"-o\", \"--outfile\",\n type=str, default=\"\",\n help=\"Path to output file.\\n\" +\n \"Default : print to stdout\\n\")\n\n args = parser.parse_args()\n return args #}}}", "def _setup_arguments(self):\n\n self._parser.add_argument(\"-a\", \"--area-interest\",\n help=\"Area of interest to process, \"\n \"shapefile path\", required=True)\n # FUTURE VERSIONS\n # self._parser.add_argument(\"-s\", \"--srtm-dem\",\n # help=\"Path to SRTM DEM file. Zip format\",\n # required=False)\n # self._parser.add_argument(\"-y\", \"--hsheds-dem\",\n # help=\"Path to HSHEDS DEM file. Zip format\",\n # required=False)\n # self._parser.add_argument(\"-g\", \"--groves-file\",\n # help=\"Path to groves classification file. \"\n # \"Zip format\",\n # required=False)", "def parse_arguments():\n\n parser = argparse.ArgumentParser(\n description=\"生成用户字符串识别的切分字符串\"\n )\n parser.add_argument(\n \"-o\",\n \"--output_dir\",\n type=str,\n nargs=\"?\",\n help=\"The output directory\",\n default=\"output/\"\n )\n parser.add_argument(\n \"-i\",\n \"--input_file\",\n type=str,\n nargs=\"?\",\n help=\"When set, this argument uses a specified text file as source for the text\",\n default=\"\",\n required=True\n )\n parser.add_argument(\n \"-mi\",\n \"--min_char_count\",\n type=int,\n nargs=\"?\",\n help=\"The minimum number of characters per line, Default is 3.\",\n default=3,\n\n )\n parser.add_argument(\n \"-ma\",\n \"--max_char_count\",\n type=int,\n nargs=\"?\",\n help=\"The maximum number of characters per line, Default is 20.\",\n default=20,\n )\n return parser.parse_args()", "def add_args(parser):\n add_encoder_args(parser)\n add_decoder_args(parser)", "def get_cert_kwargs(self) -> RequestKwargsType:\n kwargs: RequestKwargsType = {}\n\n if is_local(env=self.milmove_env):\n kwargs.update(deepcopy(LOCAL_TLS_CERT_KWARGS))\n else:\n kwargs[\"cert\"] = DP3_CERT_KEY_PEM\n\n if \"verify\" in kwargs and not kwargs[\"verify\"]:\n urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)\n\n return kwargs", "def process_arguments():\r\n # Create ArgumentParser object. Description message will be displayed as part of help message if script is run with -h flag\r\n parser = argparse.ArgumentParser(description='Downloads summary of findings for given interpretation request')\r\n # Define the arguments that will be taken.\r\n parser.add_argument('--ir_id', required=True, help='Interpretation request ID')\r\n parser.add_argument('--ir_version', required=True, help='Interpretation request version')\r\n parser.add_argument('-o', '--output_file', required=True, help='Output PDF')\r\n parser.add_argument('--header', required=False, help='Text for header of report')\r\n # Return the arguments\r\n return parser.parse_args()", "def get_args():\n parser = argparse.ArgumentParser(description='Download genome')\n parser.add_argument(\"accession\", help=\"NCBI Accession Number\")\n parser.add_argument(\"email\", help=\"E-mail address\")\n return parser.parse_args()", "def get_kwargs():\n\treturn get_kwargs_raw(sys.argv)", "def _parse_args():\n parser = argparse.ArgumentParser(description=\"\")\n parser.add_argument('password' , type=bytearray)\n parser.add_argument('authenticator' , type=bytearray)\n parser.add_argument('encrypted_password' , type=bytearray)\n\n return parser.parse_args()", "def parse_arguments():\n custom_config = config.read()\n arguments = docopt(__doc__, version='Montanus %s' % __version__)\n logger.debug(custom_config)\n conf_file = arguments.get('--with-conf')\n if conf_file is not None:\n conf_config = config.read(conf_file)\n\n for (k, v) in conf_config.items():\n if v is not None:\n custom_config[k] = v\n\n logger.debug(arguments)\n command_config = {\n 'templates_path': arguments.get('<templates_path>'),\n 'static_files_path': arguments.get('--with-static-files-path') \\\n if arguments.get('-with-static-files-path') is not None \\\n else arguments.get('<templates_path>'),\n 'delete_source': arguments.get('--delete'),\n 'protocol': arguments.get('--with-protocol'),\n 'domains': arguments.get('--with-domains').split(',') \\\n if arguments.get('--with-domains') is not None \\\n else None,\n 'md5_len': int(arguments.get('--with-md5-len')),\n 'md5_concat_by': arguments.get('--with-md5-concat-by')\n }\n logger.debug(command_config)\n\n for (k, v) in command_config.items():\n if v is not None:\n custom_config[k] = v\n\n logger.debug(custom_config)\n return DictWrapper(custom_config)", "def __get_cli_args():\r\n parser = argparse.ArgumentParser()\r\n o = parser.add_mutually_exclusive_group()\r\n o.add_argument('-a', action='store_true')\r\n o.add_argument('-b', action='store_true')\r\n parser.add_argument('-suite', help='suite file name for execution')\r\n parser.add_argument('-log', help='LOG level for the execution', default='INFO',\r\n choices=['INFO', 'DEBUG', 'WARNING', 'ERROR', 'CRITICAL'])\r\n args = parser.parse_args()\r\n return args", "def main(argv):\n\n valid = [\"proxy=\", \"myproxy=\", \"mail=\", \"send-mail=\", \"time=\", \"verbose\", \"help\"]\n ### // Default values\n proxy = os.getenv('X509_USER_PROXY')\n myproxy = False\n verbose = False\n mail = os.getenv('USER')\n sendMail = True\n time = 3\n\n try:\n opts, _args = getopt.getopt(argv, \"\", valid)\n except getopt.GetoptError as ex:\n print(\"Options: {}\\n\\nException: {}\".format(main.__doc__, str(ex)))\n sys.exit(1)\n\n ### // Handle arguments given in the command line\n for opt, arg in opts:\n if opt == \"--help\":\n print(main.__doc__)\n sys.exit(0)\n if opt == \"--proxy\":\n proxy = arg\n if proxy.startswith(\"~/\"):\n proxy = os.getenv('HOME') + proxy[1:]\n if not os.path.exists(proxy):\n print(\"Proxy File does not exist\")\n sys.exit(2)\n if opt == \"--mail\":\n mail = arg\n if opt == \"--myproxy\":\n myproxy = arg\n if opt == \"--send-mail\":\n sendMail = arg\n if opt == \"--time\":\n time = int(arg)\n if time < 1:\n print(\"Invalid time format. Check the options: {}\".format(main.__doc__))\n raise sys.exit(3)\n if opt == \"--verbose\":\n verbose = True\n\n command = [\"voms-proxy-info\", \"-file\", str(proxy)]\n p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n out, _err = p.communicate()\n proxyInfo = [line for line in out.split('\\n') if line]\n processTimeLeft(sendMail, verbose, proxyInfo, time, mail)\n\n if myproxy:\n os.environ[\"X509_USER_CERT\"] = proxy\n os.environ[\"X509_USER_KEY\"] = proxy\n command = [\"myproxy-info\", \"-v\", \"-l\", \"amaltaro\", \"-s\", \"myproxy.cern.ch\", \"-k\", \"amaltaroCERN\"]\n p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n out, err = p.communicate()\n proxyInfo = [line for line in out.split('\\n') if line]\n processTimeLeft(sendMail, verbose, proxyInfo, time, mail)", "def clean_command_line(args):\n args = vars(args)\n # solo devuelvo los items que tienen datos en el runstring\n ret = {}\n for item in args:\n if args[item]:\n ret[item] = args[item]\n return ret", "def setup_args(cls, parser):\n pass", "def oic_pre_construct(self, cli_info, request_args=None, **kwargs):\n for prop in self.msg_type.c_param.keys():\n if prop in request_args:\n continue\n try:\n request_args[prop] = cli_info.behaviour[prop]\n except KeyError:\n pass\n\n if \"post_logout_redirect_uris\" not in request_args:\n try:\n request_args[\n \"post_logout_redirect_uris\"] = \\\n cli_info.post_logout_redirect_uris\n except AttributeError:\n pass\n\n if \"redirect_uris\" not in request_args:\n try:\n request_args[\"redirect_uris\"] = cli_info.redirect_uris\n except AttributeError:\n raise MissingRequiredAttribute(\"redirect_uris\", request_args)\n\n try:\n if cli_info.provider_info[\n 'require_request_uri_registration'] is True:\n request_args['request_uris'] = cli_info.generate_request_uris(\n cli_info.requests_dir)\n except KeyError:\n pass\n\n return request_args, {}", "def commandline_options():\n parser = argparse.ArgumentParser(\n description='ocn_diags_generator: CESM wrapper python program for Ocean Diagnostics packages.')\n\n parser.add_argument('--backtrace', action='store_true',\n help='show exception backtraces as extra debugging '\n 'output')\n\n parser.add_argument('--debug', action='store_true',\n help='extra debugging output')\n\n #parser.add_argument('--config', nargs=1, required=True, help='path to config file')\n\n options = parser.parse_args()\n return options", "def get_args():\n\n parser = argparse.ArgumentParser(description=\"Add a (sub)tree from a vCenter's structure to the Nuage vCenter Deployment Tool. This can be done by specifying the datacenters, clusters and hosts you want to add. You can also specify to include all datacenters and/or clusters and/or hosts, depending on your requirements. It is also possible to provide a CSV file containing the hosts to add and each hosts specific configuration. Creation will only happen if the entity doesn't exist yet in the vCenter Deployment Tool. Hosts will be updated with the new configuration if you run the script with already existsing hosts. This script is also capable of updating the ESXi Hosts Agent VM settings.\")\n parser.add_argument('--all-clusters', required=False, help='Configure all Clusters from the selected vCenter Datacenters', dest='all_clusters', action='store_true')\n parser.add_argument('--all-datacenters', required=False, help='Configure all vCenter Datacenters from the vCenter', dest='all_datacenters', action='store_true')\n parser.add_argument('--all-hosts', required=False, help='Configure all Hosts from the selected Clusters', dest='all_hosts', action='store_true')\n parser.add_argument('--cluster', required=False, help='Cluster that has to be present in the Nuage vCenter Deployment Tool (can be specified multiple times)', dest='clusters', type=str, action='append')\n parser.add_argument('-d', '--debug', required=False, help='Enable debug output', dest='debug', action='store_true')\n parser.add_argument('-f', '--allow-fqdn', required=False, help='Allow the use of FQDN in the CSV hosts file instead of IP', dest='allow_fqdn', action='store_true')\n parser.add_argument('--datacenter', required=False, help='Datacenter that has to be present in the Nuage vCenter Deployment Tool (can be specified multiple times)', dest='datacenters', type=str, action='append')\n parser.add_argument('--host', required=False, help='Host IPs that has to be present in the Nuage vCenter Deployment Tool (can be specified multiple times)', dest='hosts', type=str, action='append')\n parser.add_argument('--host-configure-agent', required=False, help='Configure the VM Agent settings of the vCenter Hosts. It will configure the Management network you specify as an argument with --hv-management-network, or the one in the CSV file if specified. For datastore it will use the first available local datastore, or the one specified in the CSV file if provided.', dest='host_configure_agent', action='store_true')\n parser.add_argument('--hosts-file', required=False, help='CSV file which contains the configuration for each hypervisor', dest='hosts_file', type=str)\n parser.add_argument('--hv-user', required=True, help='The ESXi (default) hosts username', dest='hv_username', type=str)\n parser.add_argument('--hv-password', required=False, help='The ESXi hosts password. If not specified, the user is prompted at runtime for a password', dest='hv_password', type=str)\n parser.add_argument('--hv-management-network', required=True, help='The ESXi hosts management network', dest='hv_management_network', type=str)\n parser.add_argument('--hv-data-network', required=True, help='The ESXi hosts data network', dest='hv_data_network', type=str)\n parser.add_argument('--hv-vm-network', required=True, help='The ESXi hosts VM network', dest='hv_vm_network', type=str)\n parser.add_argument('--hv-mc-network', required=True, help='The ESXi hosts Multicast Source network', dest='hv_mc_network', type=str)\n parser.add_argument('-l', '--log-file', required=False, help='File to log to (default = stdout)', dest='logfile', type=str)\n parser.add_argument('--nuage-enterprise', required=True, help='The enterprise with which to connect to the Nuage VSD/SDK host', dest='nuage_enterprise', type=str)\n parser.add_argument('--nuage-host', required=True, help='The Nuage VSD/SDK endpoint to connect to', dest='nuage_host', type=str)\n parser.add_argument('--nuage-port', required=False, help='The Nuage VSD/SDK server port to connect to (default = 8443)', dest='nuage_port', type=int, default=8443)\n parser.add_argument('--nuage-password', required=False, help='The password with which to connect to the Nuage VSD/SDK host. If not specified, the user is prompted at runtime for a password', dest='nuage_password', type=str)\n parser.add_argument('--nuage-user', required=True, help='The username with which to connect to the Nuage VSD/SDK host', dest='nuage_username', type=str)\n parser.add_argument('--nuage-vrs-ovf', required=False, help='The URL of the VRS OVF file', dest='nuage_vrs_ovf', type=str)\n parser.add_argument('-S', '--disable-SSL-certificate-verification', required=False, help='Disable SSL certificate verification on connect', dest='nosslcheck', action='store_true')\n parser.add_argument('-v', '--verbose', required=False, help='Enable verbose output', dest='verbose', action='store_true')\n parser.add_argument('--vcenter-host', required=True, help='The vCenter server to connect to, use the IP', dest='vcenter_host', type=str)\n parser.add_argument('--vcenter-name', required=False, help='The name of the vCenter you want in the vCenter Deployment Tool', dest='vcenter_name', type=str)\n parser.add_argument('--vcenter-http-port', required=False, help='The vCenter server HTTP port to connect to (default = 80)', dest='vcenter_http_port', type=int, default=80)\n parser.add_argument('--vcenter-https-port', required=False, help='The vCenter server HTTPS port to connect to (default = 443)', dest='vcenter_https_port', type=int, default=443)\n parser.add_argument('--vcenter-password', required=False, help='The password with which to connect to the vCenter host. If not specified, the user is prompted at runtime for a password', dest='vcenter_password', type=str)\n parser.add_argument('--vcenter-user', required=True, help='The username with which to connect to the vCenter host', dest='vcenter_username', type=str)\n args = parser.parse_args()\n return args", "def ReadArguments():\n\n args = ParseArguments()\n\n logging.info('Command line arguments...')\n for arg in vars(args):\n logging.info(str(arg) + ': ' + str(getattr(args, arg)))\n logging.info('')\n\n IsTest(args)\n ProcessCacheSize(args)\n ProcessLineSize(args)\n ProcessMulti(args)\n ProcessMemPattern(args)\n ProcessMemFile(args)", "def parse_cmdline():\n\tparser = ArgumentParser(prog=\"FastP_QC.py\", description=\"\"\"Script collects stats from fastp jsons.\"\"\")\n\tparser.add_argument(\"-r1\", \"--r1_stats\", dest=\"r1_stats\", action=\"store\", required=True, help=\"Text file with r1 stats, from q30.py script.\")\n\tparser.add_argument(\"-r2\", \"--r2_stats\", dest=\"r2_stats\", action=\"store\", required=True, help=\"Text file with r2 stats, from q30.py script.\")\n\tparser.add_argument(\"-n\", \"--name\", dest=\"name\", action=\"store\", required=True, help=\"Sample name\")\n\targs = parser.parse_args()\n\treturn args", "def cmd_arguments():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"-i\", \"--image\",\n help=\"Full image path can be optionally supplied.\")\n args = parser.parse_args()\n return args", "def map_arguments():\n arguments = {\n '-c': 'ogg',\n '-d': 'no',\n '-q': '4'\n }\n args = sys.argv[:]\n args.pop(0)\n while len(args) > 1:\n if args[0] == '-c' and re.search('^mp3$|^ogg$', args[1]) or \\\n args[0] == '-d' and re.search('^y(es)?$', args[1]) or \\\n args[0] == '-q' and re.search('^[0-9]$', args[1]):\n arguments[args[0]] = args[1]\n args.pop(0)\n args.pop(0)\n else:\n print_help()\n if len(args) == 1:\n print_help()\n return arguments", "def get_args_from_command_line():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"--country_code\", type=str,\n help=\"Country code\",\n default=\"US\")\n parser.add_argument(\"--n_workers\", type=int, help=\"number of workers\",\n default=20)\n parser.add_argument(\"--survey_link\", type=str)\n parser.add_argument(\"--block_size\", help='number of tweets per worker', type=int)\n parser.add_argument(\"--version_number\", type=str)\n parser.add_argument(\"--mode\", type=str, help='Whether to create HIT in sandbox or in production')\n parser.add_argument(\"--language_qualification\", type=int, help='')\n\n args = parser.parse_args()\n return args", "def ParseCommandArguments(args):\n\n\n\n import argparse\n from google.appengine.tools import boolean_action\n\n parser = argparse.ArgumentParser()\n parser.add_argument('-A', '--application', required=True)\n parser.add_argument('--api_host', default='')\n\n parser.add_argument('--api_port', default=8000, type=int)\n parser.add_argument('--trusted',\n action=boolean_action.BooleanAction,\n const=True,\n default=False)\n parser.add_argument('--application_root', default=None)\n parser.add_argument('--application_host', default='localhost')\n parser.add_argument('--application_port', default=None)\n\n\n parser.add_argument('--blobstore_path', default=None)\n\n\n parser.add_argument('--datastore_path', default=None)\n\n parser.add_argument('--auto_id_policy', default='scattered',\n type=lambda s: s.lower(),\n choices=(datastore_stub_util.SEQUENTIAL,\n datastore_stub_util.SCATTERED))\n\n parser.add_argument('--use_sqlite',\n action=boolean_action.BooleanAction,\n const=True,\n default=False)\n parser.add_argument('--high_replication',\n action=boolean_action.BooleanAction,\n const=True,\n default=False)\n parser.add_argument('--require_indexes',\n action=boolean_action.BooleanAction,\n const=True,\n default=False)\n parser.add_argument('--clear_datastore',\n action=boolean_action.BooleanAction,\n const=True,\n default=False)\n\n\n parser.add_argument('--logs_path', default=None)\n\n\n parser.add_argument('--enable_sendmail',\n action=boolean_action.BooleanAction,\n const=True,\n default=False)\n parser.add_argument('--smtp_host', default='')\n\n parser.add_argument('--smtp_port', default=25, type=int)\n parser.add_argument('--smtp_user', default='')\n parser.add_argument('--smtp_password', default='')\n parser.add_argument('--show_mail_body',\n action=boolean_action.BooleanAction,\n const=True,\n default=False)\n\n\n parser.add_argument('--prospective_search_path', default=None)\n parser.add_argument('--clear_prospective_search',\n action=boolean_action.BooleanAction,\n const=True,\n default=False)\n\n\n parser.add_argument('--enable_task_running',\n action=boolean_action.BooleanAction,\n const=True,\n default=True)\n\n parser.add_argument('--task_retry_seconds', default=30, type=int)\n\n\n parser.add_argument('--user_login_url', default=None)\n parser.add_argument('--user_logout_url', default=None)\n\n return parser.parse_args(args)", "def add_cmdline_arguments_to_browser(self, browser_capabilities, config_section):\n\n browser_name = self.shishito_support.get_opt(config_section, 'browser').lower()\n arguments = self.get_browser_arguments(config_section)\n if arguments:\n try:\n options_kw = BROWSER_KEYWORDS[browser_name][OPTIONS]\n args_kw = BROWSER_KEYWORDS[browser_name][ARGUMENTS]\n browser_capabilities.setdefault(options_kw, {}).setdefault(args_kw, []).extend(arguments)\n except:\n pass", "def AddCertificateFlag(parser, required=False):\n help_text = \"\"\"\\\n x509 PEM-encoded certificate that will be used by the replica to\n authenticate against the database server.\n \"\"\"\n parser.add_argument('--certificate', help=help_text, required=required)", "def add_cli_args(parser):\n parser.add_argument(\n '--raw_path',\n help='Source path where audio data files are stored',\n default=RAW_DATA_PATH\n )\n parser.add_argument(\n '--features_path',\n help='Output path where exported data will be placed',\n default=FEATURES_DATA_PATH\n )\n parser.add_argument(\n '--feature',\n help='name of the feature to be extracted (options: mfsc, leglaive)',\n default=VoiceActivationFrameSelectionFeatureExtractor.feature_name\n )", "def get_arguments(self):\n args = self.parser.parse_args()\n config = None\n with open(args.config_file, \"r\") as f:\n config = json.load(f)\n\n if \"collections\" in config:\n if len(config[\"collections\"]) > 0:\n collection = config[\"collections\"][0]\n if \"collection_name\" in collection:\n self.collection_name = collection[\"collection_name\"]\n else:\n raise AttributeError(\n \"'collection_name' not present in 'collections'!\"\n )\n self.slack_channel = collection[\"slack_channel\"]\n else:\n raise AttributeError(\n \"No 'collections' details found in config_file!\"\n )\n else:\n raise AttributeError(\"'collections' not present in config_file!\")\n\n if \"postman_api_key\" in config:\n self.postman_api_key = config[\"postman_api_key\"]\n else:\n raise AttributeError(\n \"'postman_api_key' not present in config_file!\"\n )\n\n if \"trigger_interval\" in config:\n self.trigger_interval = config[\"trigger_interval\"]\n\n if \"slack_token\" in config:\n self.slack_token = config[\"slack_token\"]\n else:\n raise AttributeError(\"'slack_token' not present in config_file!\")\n\n return (\n self.collection_name,\n self.postman_api_key,\n self.trigger_interval,\n self.slack_channel,\n self.slack_token,\n )", "def cmdline(self, executable, options, task, rlimits):\n data_model_param = get_data_model_from_task(task, {ILP32: \"-m32\", LP64: \"-m64\"})\n print(options)\n if data_model_param and not any(\n option.startswith(\"--clang-options=\") for option in options\n ):\n options += [\"--clang-options=\" + data_model_param]\n\n if task.property_file:\n options += [\"--svcomp-property\", task.property_file]\n else:\n raise UnsupportedFeatureException(\n \"SMACK can't execute without a property file.\"\n )\n\n options += [task.single_input_file]\n\n return [executable] + options", "def _parse_command_line_arguments():\n global config\n # Get command line args for vispy\n argnames = ['vispy-backend=', 'vispy-gl-debug', 'vispy-glir-file=',\n 'vispy-log=', 'vispy-help', 'vispy-profile=', 'vispy-cprofile',\n 'vispy-dpi=', 'vispy-audit-tests']\n try:\n opts, args = getopt.getopt(sys.argv[1:], '', argnames)\n except getopt.GetoptError:\n opts = []\n # Use them to set the config values\n for o, a in opts:\n if o.startswith('--vispy'):\n if o == '--vispy-backend':\n config['default_backend'] = a\n logger.info('vispy backend: %s', a)\n elif o == '--vispy-gl-debug':\n config['gl_debug'] = True\n elif o == '--vispy-glir-file':\n config['glir_file'] = a\n elif o == '--vispy-log':\n if ',' in a:\n verbose, match = a.split(',')\n else:\n verbose = a\n match = None\n config['logging_level'] = a\n set_log_level(verbose, match)\n elif o == '--vispy-profile':\n config['profile'] = a\n elif o == '--vispy-cprofile':\n _enable_profiling()\n elif o == '--vispy-help':\n print(VISPY_HELP)\n elif o == '--vispy-dpi':\n config['dpi'] = int(a)\n elif o == '--vispy-audit-tests':\n config['audit_tests'] = True\n else:\n logger.warning(\"Unsupported vispy flag: %s\" % o)", "def parse_arguments():\n\n parser = argparse.ArgumentParser(\n prog='choppy', description='chop -> encrypt -> (?) -> decrypt -> merge',\n allow_abbrev=False)\n\n parser.set_defaults(kw='', pw='')\n parser.set_defaults(passwordfile=None, keyfile=None, kp_file=None)\n parser.set_defaults(use_pw=False, use_key=False)\n\n subparsers = parser.add_subparsers(\n dest='command', metavar='(chop | merge | derive | gen)',\n help='see docs/usage for more information')\n\n chop_aliases = ['chp', 'c']\n merge_aliases = ['mrg', 'm']\n derive_aliases = ['der', 'd']\n gen_aliases = ['gen', 'g']\n\n cmds = ('chop', 'merge', 'derive', 'generate')\n cmd_alias = (chop_aliases, merge_aliases, derive_aliases, gen_aliases)\n cmd_map = dict(zip(cmds, cmd_alias))\n\n chp = subparsers.add_parser('chop', aliases=chop_aliases)\n mrg = subparsers.add_parser('merge', aliases=merge_aliases)\n derkey = subparsers.add_parser('derive', aliases=derive_aliases)\n gen_util = subparsers.add_parser('generate', aliases=gen_aliases)\n\n # --------------------------------------------------------------------------\n chop_grp = chp.add_argument_group('Chop')\n\n chop_grp.add_argument(\n 'input', nargs='+', type=argparse.FileType('rb'), metavar='infile',\n help='input file(s) to chop and encrypt')\n\n chop_grp.add_argument(\n '-n', type=int, default=10, dest='partitions', metavar='n',\n help='create n partitions from each input file - default: 10')\n\n chop_grp.add_argument(\n '-w', '--wobble', type=int, default=0, metavar='n', choices=range(1, 100),\n help='randomize partition size (1-99)')\n\n chop_grp.add_argument(\n '-r', '--randfn', action='store_true',\n help='use random file names for partitions instead of sequential numeric')\n\n load_keypass_options(chp, pfx='en')\n\n # --------------------------------------------------------------------------\n mrg_grp = mrg.add_argument_group('Merge')\n\n mrg_grp.add_argument(\n 'input', nargs='+', type=argparse.FileType('rb'), metavar='infile',\n help='input files to decrypt and merge')\n\n load_keypass_options(mrg, pfx='de')\n\n # --------------------------------------------------------------------------\n load_pw_options(derkey, pw_only=True)\n\n # --------------------------------------------------------------------------\n gen_grp = gen_util.add_argument_group('Utilities')\n\n gen_grp.add_argument(\n '-k', '--key', action='store_true', dest='genkey',\n help='write file containing randomly generated base64 encoded 32 byte key')\n\n gen_grp.add_argument(\n '-p', '--pw', type=int, default=0, metavar='n', dest='genpw',\n help='write file containing randomly generated password of n characters')\n\n gen_grp.add_argument(\n '-s', '--salt', type=int, default=0, metavar='n', dest='gensalt',\n help='write file containing randomly generated salt of n bytes - Standard: 32')\n\n gen_grp.add_argument(\n '-r', '--repeat', type=int, default=1, metavar='n',\n help='generate n files per command')\n\n # --------------------------------------------------------------------------\n for grp in (chp, mrg, derkey, gen_util):\n grp.add_argument(\n '-o', '--outdir', type=validate_directory, default=os.getcwd(),\n metavar='dir', help='output directory')\n\n grp.add_argument(\n '-q', '--quiet', action='store_true',\n help='disable all console text output')\n\n\n parser.add_argument('-v', '--version', action='version', version=VERSION)\n\n args = parser.parse_args()\n\n if args.command not in cmd_map:\n for k, v in cmd_map.items():\n if args.command in v:\n args.command = k\n break\n\n if args.command != 'generate':\n\n if args.use_key:\n if not args.kp_file:\n args.kw = getpass.getpass(prompt='Key: ')\n else:\n args.keyfile = args.kp_file\n\n elif args.use_pw or args.command == 'derive':\n args.use_pw = True\n\n if not args.salt:\n print('>>> salt file required for password use')\n sys.exit(0)\n\n if not args.kp_file:\n args.pw = getpass.getpass(prompt='Password: ')\n else:\n args.passwordfile = args.kp_file\n\n return args", "def _ParseCommandArguments():\n arg_parser = argparse.ArgumentParser()\n arg_parser.usage = __doc__\n\n arg_parser.add_argument('--download-dir',\n type=str,\n required=True,\n help='Directory into which corpora are downloaded.')\n arg_parser.add_argument('--build-dir',\n required=True,\n type=str,\n help='Directory where fuzzers were built.')\n args = arg_parser.parse_args()\n return args", "def prepare_args(config, bootstrap):\n config = copy.deepcopy(config)\n environ = dict(copy.deepcopy(os.environ))\n\n data = {'env': bootstrap['env'],\n 'pip': pip_cmd(bootstrap['env'], '', return_path=True),\n 'requirements': bootstrap['requirements']}\n environ.update(data)\n\n if isinstance(config, string_types):\n return config.format(**environ)\n\n for key, value in iteritems(config):\n if not isinstance(value, string_types):\n continue\n config[key] = value.format(**environ)\n\n return config_to_args(config)", "def parse_args():\n\tparser = argparse.ArgumentParser(description=\"comparing proguard-generated and predict mappings\")\n\tparser.add_argument(\"--proguard\", action=\"store\", dest=\"proguard_mappings_dir\",\n\t\t\t\t\t\trequired=True, help=\"directory of proguard-generated mappings file\")\n\tparser.add_argument(\"--predict\", action=\"store\", dest=\"predict_mappings_dir\",\n\t\t\t\t\t\trequired=True, help=\"directory of predict mappings file\")\n\tparser.add_argument(\"-o\", action=\"store\", dest=\"report_path\",\n\t\t\t\t\t\trequired=True, help=\"directory of report file\")\n\n\toptions = parser.parse_args()\n\tprint options\n\treturn options" ]
[ "0.650702", "0.6123743", "0.60399014", "0.60012686", "0.59843737", "0.57966566", "0.5721231", "0.5682582", "0.5598154", "0.5594984", "0.5591436", "0.5585888", "0.55673426", "0.55233204", "0.5516206", "0.5511681", "0.55067706", "0.54973185", "0.54691166", "0.545013", "0.54430896", "0.5442972", "0.54372233", "0.54333884", "0.54280394", "0.5424676", "0.54175377", "0.54110724", "0.5393484", "0.53847396", "0.53829515", "0.5364491", "0.53453684", "0.5342584", "0.5336463", "0.53338253", "0.5331669", "0.5295457", "0.5286611", "0.5286259", "0.5286103", "0.5283166", "0.5263534", "0.5259857", "0.5258857", "0.525585", "0.525398", "0.5242158", "0.52403504", "0.5237348", "0.5236007", "0.5228599", "0.52242404", "0.52233577", "0.52214104", "0.52137136", "0.5202222", "0.51981425", "0.5177421", "0.51773745", "0.51761585", "0.5172034", "0.516543", "0.5165399", "0.51620895", "0.51599634", "0.51591736", "0.5151041", "0.5145206", "0.5144904", "0.5141969", "0.5139952", "0.5138321", "0.51361483", "0.5129426", "0.5124057", "0.51230854", "0.5118608", "0.5114215", "0.5113259", "0.5109953", "0.51068586", "0.510322", "0.5102547", "0.51005495", "0.50974274", "0.5087206", "0.5079762", "0.50764215", "0.5073703", "0.50689703", "0.506681", "0.5066562", "0.50665283", "0.5065571", "0.5062619", "0.50553656", "0.5051947", "0.50482893", "0.50470406" ]
0.6634239
0
Sign with already loaded parameters.
def do_sign(subject_csr, issuer_obj, issuer_key, days, path_length, reqInfo, reset_info=None): # Certificate duration if days is None: die("Need --days") if days <= 0: die("Invalid --days") # Load CA info issuer_info = CertInfo(load=issuer_obj) # Load certificate request subject_info = CertInfo(load=subject_csr) if reset_info: subject_info = reset_info # Check CA parameters if not same_pubkey(subject_csr, issuer_obj): if not issuer_info.ca: die("Issuer must be CA.") if 'key_cert_sign' not in issuer_info.usage: die("Issuer CA is not allowed to sign certs.") if subject_info.ca: if not same_pubkey(subject_csr, issuer_obj): # not self-signing, check depth if issuer_info.path_length == 0: die("Issuer cannot sign sub-CAs") if issuer_info.path_length - 1 < path_length: die("--path-length not allowed by issuer") # Load subject's public key, check sanity pkey = subject_csr.public_key() if isinstance(pkey, ec.EllipticCurvePublicKey): pkeyinfo = 'ec:' + str(pkey.curve.name) if pkey.curve.name not in EC_CURVES: die("Curve not allowed: %s", pkey.curve.name) elif isinstance(pkey, rsa.RSAPublicKey): pkeyinfo = 'rsa:' + str(pkey.key_size) if pkey.key_size < MIN_RSA_BITS or pkey.key_size > MAX_RSA_BITS: die("RSA size not allowed: %s", pkey.key_size) else: die("Unsupported public key: %s", str(pkey)) # Report if subject_info.ca: msg('Signing CA cert [%s] - %s', pkeyinfo, reqInfo) else: msg('Signing end-entity cert [%s] - %s', pkeyinfo, reqInfo) msg('Issuer name: %s', render_name(issuer_info.subject)) msg('Subject:') subject_info.show(msg_show) # Load CA private key if not same_pubkey(issuer_key, issuer_obj): die("--ca-private-key does not match --ca-info data") # Stamp request cert = create_x509_cert(issuer_key, subject_csr.public_key(), subject_info, issuer_info, days=days) return cert
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sign(self, payload):\n raise NotImplementedError", "def signSign(self):\r\n if \"signature\" in self: # or \"signatures\" in self ?\r\n self.pop(\"id\", False)\r\n try:\r\n self[\"signSignature\"] = dposlib.core.crypto.getSignature(\r\n self, self._secondPrivateKey,\r\n exclude_second_sig=True,\r\n )\r\n except AttributeError:\r\n raise Exception(\"no second private Key available\")\r\n else:\r\n raise Exception(\"transaction not signed\")", "def sign(self, object):\n pass", "def sign(self, body, external_aad, private_key):", "def sign(self):\n private_key = serialization.load_pem_private_key(\n binascii.unhexlify(self.sender_private_key.encode('utf8')),\n password=None,\n backend=default_backend()\n )\n signature = private_key.sign(\n str(self.to_dict()).encode('utf8'),\n padding.PSS(\n mgf=padding.MGF1(hashes.SHA256()),\n salt_length=padding.PSS.MAX_LENGTH\n ),\n hashes.SHA256()\n )\n\n return signature", "def sign(params, signed_fields_key='orderPage_signedFields', full_sig_key='orderPage_signaturePublic'):\r\n merchant_id = settings.CC_PROCESSOR['CyberSource'].get('MERCHANT_ID', '')\r\n order_page_version = settings.CC_PROCESSOR['CyberSource'].get('ORDERPAGE_VERSION', '7')\r\n serial_number = settings.CC_PROCESSOR['CyberSource'].get('SERIAL_NUMBER', '')\r\n\r\n params['merchantID'] = merchant_id\r\n params['orderPage_timestamp'] = int(time.time() * 1000)\r\n params['orderPage_version'] = order_page_version\r\n params['orderPage_serialNumber'] = serial_number\r\n fields = u\",\".join(params.keys())\r\n values = u\",\".join([u\"{0}={1}\".format(i, params[i]) for i in params.keys()])\r\n fields_sig = processor_hash(fields)\r\n values += u\",signedFieldsPublicSignature=\" + fields_sig\r\n params[full_sig_key] = processor_hash(values)\r\n params[signed_fields_key] = fields\r\n\r\n return params", "def sign(self):\r\n self._reset()\r\n if hasattr(self, \"_privateKey\"):\r\n if \"fee\" not in self:\r\n setFees(self)\r\n if self.type == 4:\r\n missings = \\\r\n self.asset[\"multiSignature\"][\"min\"] - \\\r\n len(self.get(\"signature\", []))\r\n if missings:\r\n raise Exception(\"owner signature missing (%d)\" % missings)\r\n self[\"signature\"] = dposlib.core.crypto.getSignature(\r\n self, self._privateKey\r\n )\r\n else:\r\n raise Exception(\"orphan transaction can not sign itsef\")", "def sign_vars(self, url, vars):\n vars[\"_signature\"] = self._sign(url, vars)", "def sign_transaction(self):\n private_key=RSA.importKey(binascii.unhexlify(self.sender_private_key))\n signer=PKCS1_v1_5.new(private_key)\n h=SHA.new(str(self.to_dict()).encode('utf8'))\n return binascii.hexlify(signer.sign(h)).decode('ascii')", "def _sign(self, data, salt):\r\n strBuffer = \"\"\r\n # print data.keys()\r\n for k in sorted(data.iterkeys()):\r\n\r\n # Handle the BOOL special case\r\n v = data[k]\r\n if type(v) == bool:\r\n if v:\r\n v = 1\r\n else:\r\n v = 0\r\n data[k] = v\r\n\r\n # Update buffer\r\n strBuffer += \"%s=%s\\n\" % (str(k).lower(), vmcp.myquote(str(v)))\r\n\r\n # Append salt\r\n strBuffer += salt\r\n return strBuffer", "def test_sign(self):\n self.signer.Sign(b'notadb')\n self.assertTrue(True)", "def get_signed(self, **payload):\n param = ''\n for k in payload:\n param += '&' + k + '=' + str(payload[k])\n param = param.lstrip('&')\n signature = hmac.new(self.secret, param, digestmod=hashlib.sha256).hexdigest()\n\n return signature", "def signWithSecret(self, secret):\r\n self.link(secret)\r\n self.sign()", "def signSignWithSecondSecret(self, secondSecret):\r\n self.link(None, secondSecret)\r\n 
self.signSign()", "def sign(self, params: Dict[str, Any]) -> str:\n\n assert self.secret is not None, \"A client secret is required to sign requests.\"\n\n query = urlencode(params)\n signature = hmac.new(self.secret.encode(), query.encode(), hashlib.sha512)\n\n return signature.hexdigest()", "def signSignWithKey(self, secondPrivateKey):\r\n self._secondPrivateKey = secondPrivateKey\r\n self.signSign()", "def sign(self, inputs):\n pass", "def sign(self, inputs):\n pass", "def sign_transaction():\n data = request.get_json()\n\n try:\n tx = Transaction.from_dict(data)\n except TypeError:\n response = dict(message='Improper transaction json provided.')\n status_code = 400\n return jsonify(response), status_code\n\n signature = tx.sign(node.wallet.private_key_rsa)\n response = dict(signature=signature)\n return jsonify(response), 200", "def setSign(self, *args):\n return _libsbml.Input_setSign(self, *args)", "def sign_transaction(self, private_key):\n\n to_be_hashed = (str(self.timestamp) +\n str(self.sender_address) +\n str(self.recipient_address) +\n str(self.amount) +\n # str(self.transaction_inputs) +\n # str(self.transaction_outputs) +\n str(self.transaction_id))\n\n # Create a hash value of the whole message\n sha_hash = SHA256.new(to_be_hashed.encode())\n\n # Import private key\n key = RSA.importKey(private_key)\n\n # print(sha_hash)\n\n # Construct an instance of the crypto object\n cipher = PKCS1_v1_5.new(key)\n\n # Create and return the signature\n self.transaction_signature = cipher.sign(sha_hash)", "def sign(self, data):\n from base64 import urlsafe_b64encode\n\n if self.sign_private == \"\":\n raise ValueError(\"Error signing: No private signing key found for {}\".format(self))\n\n key_private = RsaPrivateKey.Read(self.sign_private)\n signature = key_private.Sign(data)\n return urlsafe_b64encode(signature)", "def sign(self, privkey):\n seckey = CIoncoinSecret.from_secret_bytes(x(ioncointools.encode_privkey(privkey, \"hex\")))\n\n for i in range(len(self.tx.vin)):\n txin_scriptPubKey = self.tx.vin[i].scriptSig\n sighash = SignatureHash(txin_scriptPubKey, self.tx, i, SIGHASH_ALL)\n sig = seckey.sign(sighash) + struct.pack('<B', SIGHASH_ALL)\n self.tx.vin[i].scriptSig = CScript([sig, seckey.pub])\n\n VerifyScript(self.tx.vin[i].scriptSig, txin_scriptPubKey, self.tx, i, (SCRIPT_VERIFY_P2SH,))", "def _sign(self, cert, keypair, certs, crls, flags):\n\n # pylint: disable=W0201\n cms = self.POW_class()\n cms.sign(cert, keypair, self.encode(), certs, crls, self.econtent_oid, flags)\n self.POW = cms", "def _build_signature(self):\n sig_contents = \\\n self.payload + \".\" + \\\n b64encode(b\"application/xml\").decode(\"ascii\") + \".\" + \\\n b64encode(b\"base64url\").decode(\"ascii\") + \".\" + \\\n b64encode(b\"RSA-SHA256\").decode(\"ascii\")\n sig_hash = SHA256.new(sig_contents.encode(\"ascii\"))\n cipher = PKCS1_v1_5.new(self.private_key)\n sig = urlsafe_b64encode(cipher.sign(sig_hash))\n key_id = urlsafe_b64encode(bytes(self.author_handle, encoding=\"utf-8\"))\n return sig, key_id", "def sign(self, bytes):\r\n if not self.hasPrivateKey():\r\n raise AssertionError()\r\n paddedBytes = self._addPKCS1Padding(bytes, 1)\r\n m = bytesToNumber(paddedBytes)\r\n if m >= self.n:\r\n raise ValueError()\r\n c = self._rawPrivateKeyOp(m)\r\n sigBytes = numberToByteArray(c, numBytes(self.n))\r\n return sigBytes", "def sign(self, data):\n\n key_private = RsaPrivateKey.Read(self.sign_private)\n signature = key_private.Sign(data)\n return b64encode(signature)", "def signature(self, params):\n string = 
''.join(key + params[key] for key in sorted(params.keys()))\n return md5(string + self.cfg('secret'))", "def sign(self, message, private_key):\n sk = private_key\n vk = sk.get_verifying_key()\n\n self.public_key = vk\n\n # This would be the Ed25519ph version (JavaScript ES7):\n # const message = crypto.createHash('sha512')\n # .update(Buffer.concat([this.messagePrefix, this.message]))\n # .digest()\n\n self.signature = sk.sign(message, encoding='bytes')", "def sign_transaction(self, transaction):\n try:\n address = transaction.from_address\n private_key = self.addresses[address]['private_key']\n transaction.sign_transaction(private_key)\n except Exception as ex:\n print(\"Error signing transaction from address: \" + address + \" \" + str(ex))", "def __init__(self, session=None, key=None, salt=b\"\", variables_to_sign=None):\n super().__init__() # Yes, I know that this currently doesn't do anything.\n self.session = session\n self.key = key or Session.SECRET\n self.salt = salt\n self.variables_to_sign = variables_to_sign or []\n assert \"_signature\" not in self.variables_to_sign", "def __sign(self, text):\n signature = HMAC.new(self.sign_key, text.encode('utf-8'), SHA256).digest()\n return base64.standard_b64encode(signature)", "def sign_from_args(self):\n return (True, False)", "def _sign(self, url, endpoint, endpoint_path, method_verb, *args, **kwargs):\n url = self._uri\n return url, {'params': {'test_param': \"authenticated_test_user\"}}", "def signWithKeys(self, publicKey, privateKey):\r\n if self.get(\"senderPublicKey\", None) != publicKey:\r\n self.senderPublicKey = publicKey\r\n self._privateKey = privateKey\r\n self.sign()", "def add_sign(self):\n if self.is_signed():\n self.remove_sign()\n \n data = self._document.read()\n encrypted = self._encryptor.encrypt_cbc(data, self._init_vector)\n hash_value = encrypted[-16:]\n self._document.write(self._seperator.encode() + hash_value + self._seperator.encode())\n print(\"The document is signed!\")", "def sign(data):\n return _make.sign(data)", "def sign(self, msg: Dict) -> Dict:\n ser = serialize_msg_for_signing(msg, topLevelKeysToIgnore=[f.SIG.nm,\n f.SIGS.nm])\n bsig = self.naclSigner.signature(ser)\n sig = base58.b58encode(bsig).decode(\"utf-8\")\n return sig", "def Sign(self, bytes_to_sign, logf=None):\r\n # Implements PKCS1-v1_5 w/SHA256 over the bytes, and returns\r\n # the result as a base64url encoded bignum.\r\n\r\n self._Log(logf, 'bytes_to_sign = [%s]' % bytes_to_sign.encode('hex'))\r\n\r\n self._Log(logf, 'keypair size : %s' % self.keypair.size())\r\n\r\n # Generate the PKCS1-v1_5 compatible message, which includes\r\n # magic ASN.1 bytes and padding:\r\n emsa_msg = self._MakeEmsaMessageSha256(bytes_to_sign, self.keypair.size(), logf)\r\n # TODO(jpanzer): Check whether we need to use max keysize above\r\n # or just keypair.size\r\n\r\n self._Log(logf, 'emsa_msg = [%s]' % emsa_msg.encode('hex'))\r\n\r\n # Compute the signature:\r\n signature_long = self.keypair.sign(emsa_msg, None)[0]\r\n\r\n # Encode the signature as armored text:\r\n signature_bytes = number.long_to_bytes(signature_long)\r\n\r\n self._Log(logf, 'signature_bytes = [%s]' % signature_bytes.encode('hex'))\r\n\r\n return base64.urlsafe_b64encode(signature_bytes).encode('utf-8')", "def sign(private_key: RsaKey, content: dict) -> None:\n\n signer = PKCS1_v1_5.new(private_key)\n encoded_content = json.dumps(content, sort_keys=True).encode()\n h = SHA256.new(encoded_content)\n signature = signer.sign(h)\n\n return binascii.hexlify(signature).decode('ascii')", "def 
sign(self, data: bytes) -> bytes:\n return self._signing_key.sign(data).signature", "def sign(self, message):\n\n # if not already a byte string turn it to making sure\n if not isinstance(message, (bytes, str)):\n return None\n elif isinstance(message, str):\n message = message.encode()\n\n hash_of_message = SHA256.new(message)\n\n signer = DSS.new(self.privkey, mode=\"fips-186-3\")\n\n digital_signature = signer.sign(hash_of_message)\n digital_signature = base64.b85encode(digital_signature).decode()\n\n return digital_signature", "def _encode_and_sign(self, dict_payload, encoding=\"ascii\"):\n payload_bytes = json.dumps(dict_payload).encode(encoding)\n b64 = base64.b64encode(payload_bytes)\n creds = self._api_credentials\n secret_bytes = creds.api_secret.encode(encoding)\n signature = hmac.new(secret_bytes, b64, sha384).hexdigest()\n return b64, signature", "def _sign_cert(self, cert):\n with open(self._get_key_link(self.commonname), 'r') as private_file:\n data = private_file.read()\n pkey = crypto.load_privatekey(crypto.FILETYPE_PEM,\n data)\n cert.sign(pkey, 'sha256')", "def Sign(self, data):\n return self.rsa_key.sign(data, padding.PKCS1v15(), utils.Prehashed(hashes.SHA1()))", "def sendToSign(cmd):\n\tsubprocess.call([\"./sign.sh\", cmd])", "def get_sign_data(self):\n # TODO: Include more members in sign data.\n data = apply_sha256(self.sender + self.receiver + str(self.amount))\n return data", "def sign(self, plaintext):\n return plaintext + self.compute_digest(plaintext)", "def _rsa_sign(blob, private_key_pem):\n # Lazy import crypto. It is not available in unit tests outside of sandbox.\n from Crypto.Hash import SHA256\n from Crypto.PublicKey import RSA\n from Crypto.Signature import PKCS1_v1_5\n pkey = RSA.importKey(private_key_pem)\n return PKCS1_v1_5.new(pkey).sign(SHA256.new(blob))", "def sign_tx(self, tx: payloads.Transaction, password: str, magic: Optional[int] = None) -> None:\n if magic is None:\n magic = settings.network.magic\n\n self._validate_tx(tx)\n\n message = magic.to_bytes(4, byteorder=\"little\", signed=False) + tx.hash().to_array()\n signature = self.sign(message, password)\n\n invocation_script = vm.ScriptBuilder().emit_push(signature).to_array()\n # mypy can't infer that the is_watchonly check ensures public_key has a value\n verification_script = contracts.Contract.create_signature_redeemscript(self.public_key) # type: ignore\n tx.witnesses.insert(0, payloads.Witness(invocation_script, verification_script))", "def sign(self, msg):\n z = int.from_bytes(helper.hash256(msg), \"big\")\n k = self.deterministic_k(z)\n k_inv = pow(k, N-2, N)\n r = (k*G).x.num\n s = (z + r * self.secret) * k_inv % N\n if s > N/2:\n s = N - s\n\n return Signature(r, s)", "def encrypt_data(self, params):\n from django.core.signing import dumps\n return dumps(params, salt=self.salt_namespace)", "def sign_file_dialog():\n if X is None or Y is None:\n raise Exception(ERRORS.INVALID_AUTH)\n\n file_path = input(\"Enter file path: \")\n signature_name = input(\"Enter signature identity: \")\n\n for c in signature_name:\n ascii_c = ord(c)\n if not ((ascii_c >= 65 and ascii_c <= 90) or (ascii_c >= 97 and ascii_c <= 122) or (ascii_c >= 48 and ascii_c <= 57) or ascii_c == 95):\n raise Exception(ERRORS.INVALID_SIGNATURE_NAME)\n\n if not os.path.exists(file_path):\n raise Exception(ERRORS.INVALID_FILE)\n \n with open(file_path, \"rb\") as file:\n file_hash = hashlib.sha256(file.read()).hexdigest()\n file_hash_int = int(file_hash, 16)\n \n k = random.randint(1, Q-1)\n r = 0\n while r==0:\n r = 
int(pow(G, k, P)) % Q\n s = (pow(k,Q-2,Q)*(file_hash_int + X%Q*r%Q)%Q) % Q\n\n with open(f\"{signature_name}.sig\", \"w\") as file:\n file.write(str(r) + \"\\n\")\n file.write(str(s) + \"\\n\")\n \n print(f\"Signature {signature_name}.sig created successfully.\")", "def signed(self, encoded):\n signature = self.sign(encoded)\n return encoded + signature", "def sign(sk: SecretKey, msgs: List[bytes]) -> Signature:\n assert(len(msgs) == len(sk.y))\n\n # pick generator\n h = G1.generator()\n exponent = sk.x + sum([y_i * Bn.from_binary(m_i)\n for (y_i, m_i) in zip(sk.y.values(), msgs)])\n\n return Signature(h, h**exponent) # type:ignore", "def multiSignWithSecret(self, secret):\r\n keys = dposlib.core.crypto.getKeys(secret)\r\n self.multiSignWithKey(keys[\"privateKey\"])", "def sign(self, digest):\n sig = self.private_key_obj.sign(digest, ec.ECDSA(utils.Prehashed(hashes.SHA256())))\n sig_rs = utils.decode_dss_signature(sig)\n sig_r = int.to_bytes(sig_rs[0], 32, \"big\")\n sig_s = int.to_bytes(sig_rs[1], 32, \"big\")\n return bytes(bytearray(sig_r)+bytearray(sig_s))", "def _sign(self, path, nonce, data):\n url = '/{0}/{1}'.format(self._version, path)\n urlencoded_data = urllib.urlencode(data)\n msg = url + hashlib.sha256(str(nonce) + urlencoded_data).digest()\n signature = hmac.new(base64.b64decode(self._secret), msg,\n hashlib.sha512)\n return base64.b64encode(signature.digest())", "def sign_partial(self, *partial_signers: Keypair) -> None:\n underlying_signers = [signer.to_solders() for signer in partial_signers]\n self._solders.partial_sign(underlying_signers, self._solders.message.recent_blockhash)", "def sign (self):\n print(\"*** signing the inno setup installer ***\")\n pfxfile = r'scripts\\%s.pfx' % self.lname\n if os.path.isfile(pfxfile):\n path = get_windows_sdk_path()\n signtool = os.path.join(path, \"bin\", \"signtool.exe\")\n if os.path.isfile(signtool):\n cmd = [signtool, 'sign', '/f', pfxfile, self.distfile]\n subprocess.check_call(cmd)\n else:\n print(\"No signed installer: signtool.exe not found.\")\n else:\n print(\"No signed installer: certificate %s not found.\" % pfxfile)", "def sign(self, *signers: Keypair) -> None:\n underlying_signers = [signer.to_solders() for signer in signers]\n self._solders.sign(underlying_signers, self._solders.message.recent_blockhash)", "def sign(cls,\n signable: Signable,\n hash_class: Type[Hash],\n key: Key) -> Signature:\n\n h = signable.hash(hash_class)\n digest_cipher = Rsa().encrypt(h.to_bytes(), key)\n\n return Signature(hash_class, digest_cipher)", "def Sign(self, msg):\n # Need to chose a random k per-message, SystemRandom() is available\n # since Python 2.4.\n k = random.SystemRandom().randint(2, self.key.q-1)\n (r, s) = self.key.sign(util.Hash(msg), k)\n return util.MakeDsaSig(r, s)", "def sign(self, message):\n return Signature(self._sk.sign(message))", "def sign_request(self, sign_request):\n\n self._sign_request = sign_request", "def make_signer(self, salt=None):\n if salt is None:\n salt = self.salt\n return self.signer(self.secret_key, salt=salt, **self.signer_kwargs)", "def Sign(self, msg):\n # Need to chose a random k per-message, SystemRandom() is available\n # since Python 2.4.\n k = random.SystemRandom().randint(2, self.key.q - 1)\n (r, s) = self.key.sign(util.Hash(msg), k)\n return util.MakeDsaSig(r, s)", "def Sign(self):\n return self.hmac.digest()", "def _sign(self, oauth_payload, request):\n\t\t# merge params\n\t\t# use oauth_payload to update request params might avoid \n\t\t# some oauth params's accidental 
overriding\n\t\tpayload = dict( request.params )\n\t\tpayload.update( oauth_payload )\n\n\t\t# here I assume that all keys contain only 'a-zA-Z_.-'\n\t\t# thus there is no necessity to percent-encode them\n\t\t# will now sort them according to their original value\n\n\t\tkeylist = sorted( payload.keys() )\n\t\trawlist = []\n\t\tfor k in keylist:\n\t\t\tencoded_value = percent_encode( payload[k] )\n\t\t\trawlist.append( \"%s=%s\" % (k, encoded_value) )\n\n\t\t# craft base string\n\t\tbase_string = request.method.upper()\n\t\tbase_string += '&'\n\t\tbase_string += percent_encode(request.base_url)\n\t\tbase_string += '&'\n\t\tbase_string += percent_encode( '&'.join( rawlist ) )\n\n\t\tself._print( \"Base string:\\n\" + base_string )\n\t\t# craft signing key\n\t\tif self.has_user():\n\t\t\tsigning_key = \"%s&%s\" % ( percent_encode(self.secret), percent_encode(self.a_secret) )\n\t\telse:\n\t\t\tsigning_key = \"%s&%s\" % ( percent_encode(self.secret), percent_encode(self.token_secret) )\n\n\t\t# sign base_string\n\t\thashed = hmac.new(signing_key, base_string, hashlib.sha1)\n\t\tsignature = binascii.b2a_base64(hashed.digest())[:-1]\n\t\t\n\t\t# append signature field\n\t\toauth_payload[\"oauth_signature\"] = signature\n\n\t\t# prepare relevant oauth values\n\t\toauth_entry = []\n\t\tfor k in oauth_payload.keys():\n\t\t\tencoded_value = percent_encode( oauth_payload[k] )\n\t\t\toauth_entry.append( '%s=\"%s\"' % (k, encoded_value) )\n\n\t\toauth_str = 'OAuth ' + ','.join(oauth_entry)\n\t\tself._print( \"OAuth header:\\n\" + oauth_str )\n\t\t# field crafted\n\t\treturn { \"Authorization\" : oauth_str }", "def sign(self):\n daskD.wait(self.client.map(_call_sign, self.vecDask, pure=False))\n return self", "def sign_cybersource_payload(payload):\n field_names = sorted(list(payload.keys()) + [\"signed_field_names\"])\n payload = {**payload, \"signed_field_names\": \",\".join(field_names)}\n return {**payload, \"signature\": generate_cybersource_sa_signature(payload)}", "def sign(key, file, sign):\n\n try:\n key = TomlKeyFormatter().from_string(key.read())\n signature = Signature.sign(SignableBinaryIO(file), Md5, key)\n\n sign.write(TomlSignatureFormatter().to_string(signature))\n\n except KeyFormatError:\n click.echo(\"ERROR: Key is in bad format\")", "def putSigned(key, ser, sig, dbn=\"core\", env=None, clobber=True):\n global gDbEnv\n\n if env is None:\n env = gDbEnv\n\n if env is None:\n raise DatabaseError(\"Database environment not set up\")\n\n keyb = key.encode(\"utf-8\")\n subDb = env.open_db(dbn.encode(\"utf-8\")) # open named sub db named dbn within env\n with env.begin(db=subDb, write=True) as txn: # txn is a Transaction object\n rsrcb = (ser + SEPARATOR + sig).encode(\"utf-8\") # keys and values must be bytes\n result = txn.put(keyb, rsrcb, overwrite=clobber )\n if not result:\n raise DatabaseError(\"Preexisting entry at key {}\".format(key))\n return True", "def get_signed(self, sig_str):\n sig_str = base64.b64encode(sig_str)\n signature = base64.b64encode(hmac.new(self.secret, sig_str, digestmod=hashlib.sha1).digest())\n return signature", "def sign_transaction(self, transaction, prvkey):\n return self.web3.eth.account.sign_transaction(transaction, prvkey)", "def setSignPDF(self, keystoreURL, keyAlias, keystorePassword, keystoreType, signingMode):\n self.PDFreactorConfiguration.in1[\"signPdfKeystoreURL\"] = keystoreURL\n self.PDFreactorConfiguration.in1[\"signPdfKeyAlias\"] = keyAlias\n self.PDFreactorConfiguration.in1[\"signPdfKeystorePassword\"] = keystorePassword\n 
self.PDFreactorConfiguration.in1[\"signPdfKeystoreType\"] = keystoreType\n self.PDFreactorConfiguration.in1[\"signPdfSigningMode\"] = signingMode", "def SIGN(self, signingKey, message, seqNum, cipher_encrypt):\n\t\treturn self.MAC(cipher_encrypt, signingKey, seqNum, message)", "def ecdsa_sign(G, priv_sign, message):\n plaintext = message.encode(\"utf8\")\n digest = sha256(plaintext).digest()\n sig = do_ecdsa_sign(G,priv_sign,digest)\n\n return sig", "def signrequest(self, signrequest):\n\n self._signrequest = signrequest", "def test_azure_sign(self):\n\n data = \"data\".encode(\"utf-8\")\n\n signer = Signer.from_priv_key_uri(self.azure_id, self.azure_pubkey)\n sig = signer.sign(data)\n\n print(sig.signature)\n\n self.azure_pubkey.verify_signature(sig, data)\n with self.assertRaises(UnverifiedSignatureError):\n self.azure_pubkey.verify_signature(sig, b\"NOT DATA\")", "def sign(priv_key: rsa.RSAPrivateKey, msg: bytes) -> Signature:\n return priv_key.sign(msg, PADDING, HASH)", "def _sign_token(self, pid):\n self.ensure_one()\n # check token field exists\n if self._mail_post_token_field not in self._fields:\n raise NotImplementedError(_(\n \"Model %(model_name)s does not support token signature, as it does not have %(field_name)s field.\",\n model_name=self._name,\n field_name=self._mail_post_token_field\n ))\n # sign token\n secret = self.env[\"ir.config_parameter\"].sudo().get_param(\"database.secret\")\n token = (self.env.cr.dbname, self[self._mail_post_token_field], pid)\n return hmac.new(secret.encode('utf-8'), repr(token).encode('utf-8'), hashlib.sha256).hexdigest()", "def sign(self, cred):\n desc = self.descriptor()\n key = cred.secret_key.encode(\"utf-8\")\n hasher = hmac.new(key, desc.encode(\"utf-8\"), hashlib.sha1)\n sign = b64encode(hasher.digest()).decode()\n self.headers[\"Authorization\"] = \"AWS %s:%s\" % (cred.access_key, sign)\n return sign", "def digest(self, *args, **kwargs): # real signature unknown\n pass", "def digest(self, *args, **kwargs): # real signature unknown\n pass", "def digest(self, *args, **kwargs): # real signature unknown\n pass", "def digest(self, *args, **kwargs): # real signature unknown\n pass", "def digest(self, *args, **kwargs): # real signature unknown\n pass", "def digest(self, *args, **kwargs): # real signature unknown\n pass", "def sign(self, data: bytes, password: str) -> bytes:\n if self.is_watchonly:\n raise ValueError(\"Cannot sign transaction using a watch only account\")\n # mypy can't infer that the is_watchonly check ensures encrypted_key has a value\n private_key = self.private_key_from_nep2(self.encrypted_key.decode(\"utf-8\"), password) # type: ignore\n return cryptography.sign(data, private_key)", "def sign_tx(self, tx):\n if self.privkey:\n log.info('signing tx', tx=tx, account=self)\n tx.sign(self.privkey)\n else:\n raise ValueError('Locked account cannot sign tx')", "def sign(self, message, randombytes=urandom):\r\n int_header = 0x30 + logn[self.n]\r\n header = int_header.to_bytes(1, \"little\")\r\n\r\n salt = randombytes(SALT_LEN)\r\n hashed = self.hash_to_point(message, salt)\r\n\r\n # We repeat the signing procedure until we find a signature that is\r\n # short enough (both the Euclidean norm and the bytelength)\r\n '''\r\n print(\"---------Inside sign----------\")\r\n '''\r\n while(1):\r\n if (randombytes == urandom):\r\n s = self.sample_preimage(hashed)\r\n '''\r\n print(\"s: \", s)\r\n '''\r\n else:\r\n seed = randombytes(SEED_LEN)\r\n s = self.sample_preimage(hashed, seed=seed)\r\n norm_sign = sum(coef ** 2 for coef in 
s[0])\r\n norm_sign += sum(coef ** 2 for coef in s[1])\r\n # Check the Euclidean norm\r\n if norm_sign <= self.signature_bound:\r\n\r\n enc_s = compress(s[1], self.sig_bytelen - HEAD_LEN - SALT_LEN)\r\n # Check that the encoding is valid (sometimes it fails)\r\n if (enc_s is not False):\r\n return header + salt + enc_s\r\n '''\r\n else:\r\n print(\"-------------INVALID encoding---------------\")\r\n\r\n else:\r\n print(\"-------------NOT within signature bound---------------\")\r\n '''", "def Sign(self, msg):\n return hmac.new(self.key_bytes, msg, sha1).digest()", "def get_input_aux_data(self, data_to_sign: bytes, private_key: 'Key') -> tuple[bytes, bytes]:\n prehashed_msg = hashlib.sha256(hashlib.sha256(data_to_sign).digest()).digest()\n signature = private_key.sign(prehashed_msg)\n return private_key.sec(), signature", "def _sign_document(self):\n return False", "def ecdsa_tx_sign(txhash, priv):\n rawsig = ecdsa_raw_sign(txhash, priv)\n return der_encode_sig(*rawsig)", "def build_and_sign(builder, dest_address, payment_amount, prioritizer_seed=None):\n builder.append_payment_op(dest_address, str(payment_amount))\n builder.sign(builder.keypair.seed().decode())\n\n # prioritize transaction by adding a prioritizer signature\n if prioritizer_seed:\n builder.sign(prioritizer_seed)\n\n return builder.hash_hex(), builder.gen_xdr().decode()", "def sign(self, msg, key):\n\n if not isinstance(key, ec.EllipticCurvePrivateKey):\n raise TypeError(\"The private key must be an instance of \" \"ec.EllipticCurvePrivateKey\")\n\n self._cross_check(key.public_key())\n num_bits = key.curve.key_size\n num_bytes = (num_bits + 7) // 8\n asn1sig = key.sign(msg, ec.ECDSA(self.hash_algorithm()))\n # Cryptography returns ASN.1-encoded signature data; decode as JWS\n # uses raw signatures (r||s)\n (r, s) = decode_dss_signature(asn1sig)\n return int.to_bytes(r, num_bytes, \"big\") + int.to_bytes(s, num_bytes, \"big\")", "def create_signed_value( name, value ):", "def add_signature(self, pubkey: PublicKey, signature: Signature) -> None:\n presigner = Presigner(pubkey.to_solders(), signature)\n self._solders.partial_sign([presigner], self._solders.message.recent_blockhash)", "def sign_request(self, server_pk, credential, message, revealed_info):\n revealed_attr = revealed_info.split(',')\n credential = jsonpickle.decode(credential)\n\n #Sanitization in case revealed_info is empty\n while '' in revealed_attr:\n revealed_attr.remove('')\n\n #Check if attributes revealed are valid\n for attr in revealed_attr:\n if attr not in credential.attributes:\n raise RuntimeError(\"Revealed attributes are not in the credential\")\n\n \n signature = credential.sign(message, revealed_attr)\n\n return signature.serialize()" ]
[ "0.67569077", "0.6738855", "0.6735283", "0.6622873", "0.6465191", "0.64287317", "0.6230365", "0.6186728", "0.6170761", "0.61308557", "0.6089753", "0.6075846", "0.605452", "0.60319453", "0.6018945", "0.6009839", "0.5959301", "0.5959301", "0.5891443", "0.587198", "0.5857418", "0.5826056", "0.58157706", "0.5801469", "0.57805556", "0.5749226", "0.5738346", "0.5727683", "0.57184964", "0.5712703", "0.57095575", "0.5651338", "0.5640602", "0.5627416", "0.5619826", "0.5616212", "0.55965424", "0.55900276", "0.5582916", "0.5567916", "0.55521005", "0.5551422", "0.5530491", "0.5519465", "0.5518673", "0.5506781", "0.5503019", "0.54917943", "0.54897887", "0.5489339", "0.5483446", "0.5480883", "0.5475396", "0.5450888", "0.5448015", "0.5443052", "0.5442197", "0.5436588", "0.5427393", "0.54249597", "0.54116565", "0.5409232", "0.5406842", "0.5394451", "0.53881073", "0.53858584", "0.5358278", "0.5357728", "0.53557265", "0.5352058", "0.53473663", "0.5343627", "0.5317319", "0.5309589", "0.5306728", "0.53020316", "0.5296317", "0.5294308", "0.5288451", "0.5287364", "0.5287053", "0.526507", "0.52573216", "0.52550095", "0.52550095", "0.52550095", "0.52550095", "0.52550095", "0.52550095", "0.52536464", "0.52342683", "0.52320045", "0.5226946", "0.5216595", "0.52162975", "0.5213444", "0.5211138", "0.5206301", "0.5205886", "0.5204106", "0.5201229" ]
0.0
-1
Load command-line arguments, create Certificate Signing Request (CSR).
def req_command(args):
    if args.files:
        die("Unexpected positional arguments")

    subject_info = info_from_args(args)

    if subject_info.ca:
        msg('Request for CA cert')
    else:
        msg('Request for end-entity cert')
    subject_info.show(msg_show)

    # Load private key, create signing request
    key = load_key(args.key, load_password(args.password_file))
    req = create_x509_req(key, subject_info)
    do_output(req_to_pem(req), args, 'req')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sign_command(args):\n if args.files:\n die(\"Unexpected positional arguments\")\n\n # Load certificate request\n if not args.request:\n die(\"Need --request\")\n subject_csr = load_req(args.request)\n\n reset_info = None\n if args.reset:\n reset_info = info_from_args(args)\n\n # Load CA info\n if not args.ca_info:\n die(\"Need --ca-info\")\n if args.ca_info.endswith('.csr'):\n issuer_obj = load_req(args.ca_info)\n else:\n issuer_obj = load_cert(args.ca_info)\n\n # Load CA private key\n issuer_key = load_key(args.ca_key, load_password(args.password_file))\n if not same_pubkey(issuer_key, issuer_obj):\n die(\"--ca-private-key does not match --ca-info data\")\n\n # Certificate generation\n cert = do_sign(subject_csr, issuer_obj, issuer_key, args.days, args.path_length, args.request, reset_info=reset_info)\n\n # Write certificate\n do_output(cert_to_pem(cert), args, 'x509')", "def CreateRequests(self, args):\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, self.resources)\n certificate = file_utils.ReadFile(args.certificate, 'certificate')\n private_key = file_utils.ReadFile(args.private_key, 'private key')\n\n request = self.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=self.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n project=self.project)\n\n return [request]", "def main():\n try:\n return run_sysca(sys.argv[1:])\n except InvalidCertificate as ex:\n die(str(ex))", "def Run(self, args):\n holder = base_classes.ComputeApiHolder(self.ReleaseTrack())\n client = holder.client\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, holder.resources, default_scope=compute_scope.ScopeEnum.GLOBAL)\n\n certificate = files.ReadFileContents(args.certificate)\n private_key = files.ReadFileContents(args.private_key)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n request = client.messages.ComputeRegionSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n region=ssl_certificate_ref.region,\n project=ssl_certificate_ref.project)\n collection = client.apitools_client.regionSslCertificates\n else:\n request = client.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n project=ssl_certificate_ref.project)\n collection = client.apitools_client.sslCertificates\n\n return client.MakeRequests([(collection, 'Insert', request)])", "def setup_request_commandline() -> Request:\r\n parser = argparse.ArgumentParser()\r\n parser.add_argument(\"key\", help=\"The key to use when encrypting or \"\r\n \"decrypting. This needs to be of \"\r\n \"length 8, 16 or 24\")\r\n parser.add_argument(\"-s\", \"--string\", help=\"The string that needs to be \"\r\n \"encrypted or decrypted\")\r\n parser.add_argument(\"-f\", \"--file\", help=\"The text file that needs to be\"\r\n \"encrypted or decrypted\")\r\n parser.add_argument(\"-o\", \"--output\", default=\"print\",\r\n help=\"The output of the program. This is 'print' by \"\r\n \"default, but can be set to a file name as well.\")\r\n parser.add_argument(\"-m\", \"--mode\", default=\"en\",\r\n help=\"The mode to run the program in. 
If 'en' (default)\"\r\n \" then the program will encrypt, 'de' will cause \"\r\n \"the program to decrypt\")\r\n try:\r\n args = parser.parse_args()\r\n request = Request()\r\n request.encryption_state = CryptoMode(args.mode)\r\n request.data_input = args.string\r\n request.input_file = args.file\r\n request.output = args.output\r\n request.key = args.key\r\n print(request)\r\n return request\r\n except Exception as e:\r\n print(f\"Error! Could not read arguments.\\n{e}\")\r\n quit()", "def setup_request_commandline() -> Request:\n parser = argparse.ArgumentParser()\n parser.add_argument(\"key\", help=\"The key to use when encrypting or \"\n \"decrypting. This needs to be of \"\n \"length 8, 16 or 24\")\n parser.add_argument(\"-s\", \"--string\", help=\"The string that needs to be \"\n \"encrypted or decrypted\")\n parser.add_argument(\"-f\", \"--file\", help=\"The text file that needs to be\"\n \"encrypted or decrypted\")\n parser.add_argument(\"-o\", \"--output\", default=\"print\",\n help=\"The output of the program. This is 'print' by \"\n \"default, but can be set to a file name as well.\")\n parser.add_argument(\"-m\", \"--mode\", default=\"en\",\n help=\"The mode to run the program in. If 'en' (default)\"\n \" then the program will encrypt, 'de' will cause \"\n \"the program to decrypt\")\n try:\n args = parser.parse_args()\n request = Request()\n request.encryption_state = CryptoMode(args.mode)\n request.data_input = args.string\n request.input_file = args.file\n request.output = args.output\n request.key = args.key\n print(request)\n return request\n except Exception as e:\n print(f\"Error! Could not read arguments.\\n{e}\")\n quit()", "def req_handler(args):\n key = _get_key(args)\n subject = get_subject_arguments()\n req = create_certificate_request(key, subject=subject, file_name=args.req_out)\n if not args.req_out:\n print(print_certificate_request(req))\n return req", "def selfsign_command(args):\n if args.files:\n die(\"Unexpected positional arguments\")\n\n subject_info = info_from_args(args)\n\n if subject_info.ca:\n msg('Request for CA cert')\n else:\n msg('Request for end-entity cert')\n subject_info.show(msg_show)\n\n # Load private key, create signing request\n key = load_key(args.key, load_password(args.password_file))\n subject_csr = create_x509_req(key, subject_info)\n\n # sign created request\n cert = do_sign(subject_csr, subject_csr, key, args.days, args.path_length, '<selfsign>')\n do_output(cert_to_pem(cert), args, 'x509')", "def __init__(__self__,\n resource_name: str,\n args: ServerCertificateArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def create_ssl_cert_request ( ssl_hostnames ) :\n first_hostname = ssl_hostnames[ 0 ]\n csr_filename = get_ssl_csr_filename( first_hostname )\n key_filename = get_ssl_key_filename( first_hostname )\n openssl_cnf = \"\"\"\n[req]\ndistinguished_name = req_distinguished_name\nreq_extensions = san_ext\n\n[req_distinguished_name]\ncountryName_default = US\nstateOrProvinceName_default = New York\nlocalityName_default = New York\norganizationalUnitName_default = Home Box Office, Inc\ncommonName_default = \"\"\" + first_hostname + \"\"\"\n\n[san_ext]\nbasicConstraints = CA:FALSE\nkeyUsage = nonRepudiation, digitalSignature, keyEncipherment\nsubjectAltName = @sans\n\n[sans]\n\"\"\"\n counter = 0\n for hostname in ssl_hostnames :\n counter += 1\n openssl_cnf += 'DNS.' 
+ str( counter ) + ' = ' + hostname + '\\n'\n\n with open( first_hostname, 'w' ) as f :\n f.write( openssl_cnf )\n cmd = 'openssl req -new -newkey rsa:2048 -nodes -out ' + csr_filename + ' -keyout ' + key_filename\n cmd += ' -config ' + first_hostname + ' -subj \"/C=US/ST=New York/L=New York/O=Home Box Office Inc/CN=' + first_hostname + '\"'\n keygen = subprocess.call( cmd, shell = True )\n os.remove( first_hostname )\n if keygen != 0 :\n print \"Generation of SSL request failed!\"\n return None\n\n return { 'csr-filename' : csr_filename, 'key-filename' : key_filename }", "def __init__(__self__,\n resource_name: str,\n args: CertificateArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def add_certificate_arguments(parser):\n group = parser.add_argument_group(\"Certificate management\")\n group.add_argument(\n \"-sn\", \"--serial_number\",\n help=\"Serial number for the certificate\",\n type=int,\n default=1\n )\n group.add_argument(\n \"-d\", \"--duration\",\n help=\"Period of validity for certificate (seconds)\",\n type=int,\n default=60*60*24*(365*100+25)\n )", "def sign_certificate(csr):\n unique_filename = str(uuid.uuid4().hex)\n\n file = open(\"./csr_req/%s.csr\" % unique_filename, \"w\")\n file.write(csr.decode(\"utf-8\"))\n file.close()\n\n subprocess.run([\"../ca/scripts/sign.sh\", unique_filename], check=False)\n\n file = open(\"./csr_req/%s.p7b\" % unique_filename, \"r\")\n cert = file.read()\n\n os.remove(\"./csr_req/%s.csr\" % unique_filename)\n os.remove(\"./csr_req/%s.p7b\" % unique_filename)\n\n return cert", "def main():\n licensify(_parse_args())", "def info_from_args(args):\n return CertInfo(\n subject=parse_dn(args.subject),\n usage=parse_list(args.usage),\n alt_names=parse_list(args.san),\n ocsp_nocheck=args.ocsp_nocheck,\n ocsp_must_staple=args.ocsp_must_staple,\n ocsp_must_staple_v2=args.ocsp_must_staple_v2,\n ocsp_urls=parse_list(args.ocsp_urls),\n crl_urls=parse_list(args.crl_urls),\n issuer_urls=parse_list(args.issuer_urls),\n permit_subtrees=parse_list(args.permit_subtrees),\n exclude_subtrees=parse_list(args.exclude_subtrees),\n ca=args.CA,\n path_length=args.path_length)", "def _Run(args, holder, ssl_certificate_ref):\n client = holder.client\n\n certificate_type, self_managed, managed = _ParseCertificateArguments(\n client, args)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n request = client.messages.ComputeRegionSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n type=certificate_type,\n name=ssl_certificate_ref.Name(),\n selfManaged=self_managed,\n managed=managed,\n description=args.description),\n region=ssl_certificate_ref.region,\n project=ssl_certificate_ref.project)\n else:\n request = client.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n type=certificate_type,\n name=ssl_certificate_ref.Name(),\n selfManaged=self_managed,\n managed=managed,\n description=args.description),\n project=ssl_certificate_ref.project)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n collection = client.apitools_client.regionSslCertificates\n else:\n collection = client.apitools_client.sslCertificates\n\n return client.MakeRequests([(collection, 'Insert', request)])", "def sign_handler(args):\n if not args.issuer_key and not args.issuer_cert:\n key = _get_key(args)\n subject = get_subject_arguments()\n\n cert = selfsigned_certificate_for_key(\n key,\n subject=subject,\n serial_number=int(args.serial_number),\n 
length=args.duration,\n file_name=args.cert_out\n )\n\n else:\n req = _get_request(args)\n issuer_cert = load_certificate(args.issuer_cert)\n issuer_key = load_key(args.issuer_key)\n cert = sign_request(\n req,\n issuer_cert=issuer_cert,\n issuer_key=issuer_key,\n length=args.duration,\n file_name=args.cert_out\n )\n\n if not args.cert_out:\n print(print_certificate(cert))", "def __init__(__self__,\n resource_name: str,\n args: OriginCaCertificateArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def Run(self, args):\n holder = base_classes.ComputeApiHolder(self.ReleaseTrack())\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, holder.resources, default_scope=compute_scope.ScopeEnum.GLOBAL)\n return _Run(args, holder, ssl_certificate_ref)", "def Run(self, args):\n holder = base_classes.ComputeApiHolder(self.ReleaseTrack())\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, holder.resources, default_scope=compute_scope.ScopeEnum.GLOBAL)\n\n return _Run(args, holder, ssl_certificate_ref)", "def __init__(__self__,\n resource_name: str,\n args: SSLCertificateArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def main(argv):\n\n\n parser = argparse.ArgumentParser(description='convert der to raw')\n parser.add_argument('-s','--secretkey_file', help='Secret key', required=True)\n parser.add_argument('-p','--publickey_file', help='Public key', required=True)\n args = parser.parse_args()\n\n secretkey_file = args.secretkey_file\n publickey_file = args.publickey_file\n\n\n privkey = SigningKey.from_der(open(secretkey_file).read())\n pubkey = VerifyingKey.from_der(open(publickey_file).read())\n\n open(secretkey_file[0:-4] + \".bin\", \"wb\").write(privkey.to_string())\n open(publickey_file[0:-4] + \".bin\", \"wb\").write(pubkey.to_string())", "def show_command(args):\n for fn in args.files:\n ext = os.path.splitext(fn)[1].lower()\n if ext == '.csr':\n cmd = ['openssl', 'req', '-in', fn, '-text']\n elif ext == '.crt':\n cmd = ['openssl', 'x509', '-in', fn, '-text']\n else:\n die(\"Unsupported file: %s\", fn)\n subprocess.check_call(cmd)", "def _set_arguments(self):\n cert_location = f\"dependencies{sep}certificates{sep}localuser.crt\"\n key_location = f\"dependencies{sep}certificates{sep}localuser.key\"\n assert Path(cert_location).exists(), (\n f\"The certificate isn't \"\n f\"present at location {Path(cert_location).absolute()}\"\n )\n assert Path(key_location).exists(), (\n f\"The certificate key isn't \"\n f\"present at location {Path(key_location).absolute()}\"\n )\n self._arguments = [\n (\n \"test-certificate-verify\",\n [\"-k\", key_location, \"-c\", cert_location],\n ),\n (\n \"test-sig-algs\",\n [],\n ),\n (\n \"test-clienthello-md5\",\n [],\n ),\n (\n \"test-tls13-pkcs-signature\",\n [],\n ),\n ]", "def initialise(self, args, environ):", "def generate(name, domain, country, state, locale, email,\n keytype, keylength):\n if not domain:\n logger.error(\n \"ctl:info:generate\", \"Choose a fully-qualified domain name of the \"\n \"certificate. 
Must match a domain present on the system\"\n )\n domain = click.prompt(\"Domain name\")\n if not country:\n logger.info(\n \"ctl:cert:generate\",\n \"Two-character country code (ex.: 'US' or 'CA')\"\n )\n country = click.prompt(\"Country code\")\n if not state:\n state = click.prompt(\"State/Province\")\n if not locale:\n locale = click.prompt(\"City/Town/Locale\")\n if not email:\n email = click.prompt(\"Contact email [optional]\")\n try:\n cmd = client().certificates.generate\n job, data = cmd(\n name, domain, country, state, locale, email, keytype, keylength)\n handle_job(job)\n except Exception as e:\n raise CLIException(str(e))", "def main():\n ssl_date_fmt = r'%b %d %H:%M:%S %Y %Z'\n #cert_file_name = os.path.join(os.path.dirname(__file__), \"testcert.pem\")\n\n parser = argparse.ArgumentParser(description='Parse a certificate and show days left')\n parser.add_argument('-v', '--verbose', action='store_true', help='show full certificate')\n parser.add_argument('cert', nargs='+', help='certifcate file(s)')\n args = parser.parse_args()\n for cert_file_name in args.cert:\n try:\n cert_dict = ssl._ssl._test_decode_cert(cert_file_name)\n serial = cert_dict['serialNumber']\n subject = dict(x[0] for x in cert_dict['subject'])\n issued_to = subject['commonName']\n time_left = datetime.datetime.strptime(cert_dict['notAfter'], ssl_date_fmt) - datetime.datetime.utcnow()\n if args.verbose:\n pp(cert_dict)\n ssl_expires_in(issued_to, serial, time_left)\n\n except Exception as error:\n print(\"Error decoding certificate: {:}\".format(error))", "def read_arguments(argv):\n\tif argv[0] in ('1', '2'):\n\t\tconos_config['endpoint'] = endpoint[argv[0]]\n\telse:\n\t\tusage()\n\n\tif argv[1] in ('dev', 'test', 'int', 'prod'):\n\t\tconos_config['environment'] = argv[1]\n\t\tconos_config['sts_url'] = eval(argv[1] + '_sts_url')\n\t\tconos_config['aicuu_url'] = eval(argv[1] + '_aicuu_url')\n\telse:\n\t\tusage()\n\n\tif len(argv) == 6:\n\t\tconos_config['number_threads'] = '1'\n\telse:\n\t\tif argv[6] in ('1', '2', '3', '4', '5', '6', '7', '8'):\n\t\t\tconos_config['number_threads'] = argv[6]\n\t\telse:\n\t\t\tusage()\n\n\tconos_config['client_id'] = argv[2]\n\tconos_config['client_secret'] = argv[3]\n\tconos_config['input_file'] = argv[4]\n\tconos_config['output_file'] = argv[5]", "def create_x509_req(privkey, subject_info):\n builder = x509.CertificateSigningRequestBuilder()\n builder = builder.subject_name(subject_info.get_name())\n builder = subject_info.install_extensions(builder)\n\n # create final request\n req = builder.sign(private_key=privkey, algorithm=SHA256(), backend=get_backend())\n return req", "def init_csr(privkey, names, cert_dir):\n csr_pem, csr_der = crypto_util.make_csr(privkey.pem, names)\n\n # Save CSR\n le_util.make_or_verify_dir(cert_dir, 0o755)\n csr_f, csr_filename = le_util.unique_file(\n os.path.join(cert_dir, \"csr-letsencrypt.pem\"), 0o644)\n csr_f.write(csr_pem)\n csr_f.close()\n\n logging.info(\"Creating CSR: %s\", csr_filename)\n\n return le_util.CSR(csr_filename, csr_der, \"der\")", "def _ParseCertificateArguments(client, args):\n self_managed = None\n managed = None\n certificate_type = None\n if args.certificate:\n certificate_type = \\\n client.messages.SslCertificate.TypeValueValuesEnum.SELF_MANAGED\n certificate = files.ReadFileContents(args.certificate)\n private_key = files.ReadFileContents(args.private_key)\n self_managed = client.messages.SslCertificateSelfManagedSslCertificate(\n certificate=certificate, privateKey=private_key)\n if args.domains:\n 
certificate_type = \\\n client.messages.SslCertificate.TypeValueValuesEnum.MANAGED\n managed = client.messages.SslCertificateManagedSslCertificate(\n domains=args.domains)\n return certificate_type, self_managed, managed", "def AddCertificateFlag(parser, required=False):\n help_text = \"\"\"\\\n x509 PEM-encoded certificate that will be used by the replica to\n authenticate against the database server.\n \"\"\"\n parser.add_argument('--certificate', help=help_text, required=required)", "def run(send_to_bank=False):\n\n # Signed request\n sk = read_signing_key_file(os.path.join(SIGNING_KEY_DIR, 'cv_nid'))\n signed_request = generate_signed_request(\n data={\n 'end': '2020-07-09T22:10:25Z',\n 'start': '2020-08-09T22:10:25Z'\n },\n nid_signing_key=sk\n )\n\n if send_to_bank:\n send_request_to_bank(signed_request)\n\n write_json(\n os.path.join(SIGNED_REQUESTS_DIR, 'signed-validator-confirmation-services-request.json'),\n signed_request\n )", "def main():\n file_name = None\n key = None\n operation = None\n\n try:\n if len(sys.argv) == 1:\n raise Exception(\"No arguement passed!\")\n opts, args = getopt.getopt(sys.argv[1:], \"f:k:o:h\", [\"help\"])\n except Exception as error:\n print(error)\n sys.exit(1)\n\n for opt, arg in opts:\n if opt in (\"-h\", \"--help\"):\n usage()\n sys.exit()\n elif opt == \"-f\":\n file_name = arg\n elif opt == \"-k\":\n try:\n with open(arg) as key_file:\n key = key_file.read()\n except Exception as error:\n print(error)\n sys.exit()\n elif opt == \"-o\":\n operation = arg\n else:\n print(\"Invalid argument passed.\")\n sys.exit(1)\n \n if file_name == None or key == None or operation == None:\n print(\"Missing argument/s!\")\n usage()\n sys.exit(1)\n\n checker = DocumentChecker(file_name, key)\n\n if operation == \"1\":\n checker.add_sign()\n elif operation == \"2\":\n checker.check()\n elif operation == \"3\":\n checker.remove_sign()\n else:\n print(\"Invalid operation.\")\n sys.exit(1)", "def create_CA(dn):\n cmd_genrsa = [\"openssl\",\n \"genrsa\",\n \"-aes256\",\n \"-out\", f'{pki_dir}/ca.key',\n \"-passout\", f'pass:{ca_password}',\n f'{rsa_keysize}']\n cmd_req = [\"openssl\",\n \"req\",\n \"-new\",\n \"-x509\",\n \"-days\", \"999999\",\n \"-sha256\",\n \"-key\", f'{pki_dir}/ca.key',\n \"-out\", server_key_files[\"ca\"],\n \"-subj\", f'{dn}',\n \"-passin\", f'pass:{ca_password}']\n cmds = [cmd_genrsa, cmd_req]\n for cmd in cmds:\n exec_cmd(cmd)", "def initial_setup():\n\n if os.path.exists(cfg.ca_private_key_path()):\n pkey = _try_load_ca_private_key(cfg.ca_private_key_path())\n else:\n pkey = _generate_ca_private_key(cfg.ca_private_key_path())\n\n if os.path.exists(cfg.ca_cert_path()):\n _try_load_ca_cert(cfg.ca_cert_path())\n else:\n _generate_ca_cert(cfg.ca_cert_path(), pkey)", "def request_cert():\n\n api_request = shallow_copy(props)\n\n for key in ['ServiceToken', 'Region', 'Tags', 'Route53RoleArn']:\n api_request.pop(key, None)\n\n if 'ValidationMethod' in props:\n if props['ValidationMethod'] == 'DNS':\n\n # Check that we have all the hosted zone information we need to validate\n # before we create the certificate\n for name in set([props['DomainName']] + props.get('SubjectAlternativeNames', [])):\n get_zone_for(name)\n\n del api_request['DomainValidationOptions']\n\n e['PhysicalResourceId'] = acm.request_certificate(\n IdempotencyToken=i_token,\n **api_request\n )['CertificateArn']\n add_tags()", "def main(cli_args):\n store_obj = cert_human_py3.CertChainStore.from_socket(\n host=cli_args.host, port=cli_args.port\n )\n\n print(store_obj.dump_json)", 
"def create_csr(dn):\n tmp_file = f'/tmp/{get_temp_filename()}'\n key_filename = f'{tmp_file}.key'\n csr_filename = f'{tmp_file}.csr'\n cmd = [\n \"openssl\",\n \"req\",\n \"-subj\", f'{dn}',\n \"-newkey\", f'rsa:{rsa_keysize}',\n \"-keyout\", f'{key_filename}',\n \"-out\", f'{csr_filename}',\n \"-nodes\"\n ]\n exec_cmd(cmd)\n return read_keypair(key_filename, csr_filename)", "def CreateCrtFile(keyfile, csrfile):\n crtfile = tempfile.mkstemp()[1]\n cmd = [\n 'openssl',\n 'x509',\n '-req',\n '-days', '1',\n '-in', csrfile,\n '-signkey', keyfile,\n '-out', crtfile\n ]\n _RunCommand(cmd)\n return crtfile", "def main():\n parser = argparse.ArgumentParser()\n parser.add_argument('--corpus_dir', required=True)\n parser.add_argument('--feature_module', required=True)\n\n args = parser.parse_args()\n corpus_dir = args.corpus_dir\n feature_module = args.feature_module\n\n return build_file(corpus_dir, feature_module)", "def cli():\n config, auth, execute_now = read_command_line_arguments()\n main(config, auth, execute_now)", "def __init__(__self__,\n resource_name: str,\n args: CryptoKeyArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def create_server_certs_sign():\n global server_keystore\n\n dn_sign = \"/CN=server certificate sign RSA-PSS\"\n key_pair_rsa_sign = create_csr_pss(dn_sign)\n server_keystore[\"key-sign\"] = key_pair_rsa_sign[\"key\"]\n san = [f'URI.1 = {uuid.uuid4().urn}']\n server_keystore[\"crt-sign\"] = sign_csr(key_pair_rsa_sign[\"pub\"], dn_sign, san)", "def _Args(parser,\n include_l7_internal_load_balancing=False,\n support_managed_certs=False):\n parser.add_argument(\n '--description',\n help='An optional, textual description for the SSL certificate.')\n\n parser.display_info.AddCacheUpdater(\n flags.SslCertificatesCompleterBeta\n if include_l7_internal_load_balancing else flags.SslCertificatesCompleter)\n\n if support_managed_certs:\n managed_or_not = parser.add_group(\n mutex=True,\n required=True,\n help='Flags for managed or self-managed certificate. ')\n\n managed_or_not.add_argument(\n '--domains',\n metavar='DOMAIN',\n type=arg_parsers.ArgList(min_length=1),\n default=[],\n help=\"\"\"\\\n List of domains to create a managed certificate for.\n \"\"\")\n\n not_managed = managed_or_not.add_group('Flags for self-managed certificate')\n not_managed.add_argument(\n '--certificate',\n metavar='LOCAL_FILE_PATH',\n required=True,\n help=\"\"\"\\\n Path to a local certificate file to create a self-managed\n certificate. The certificate must be in PEM format. The certificate\n chain must be no greater than 5 certs long. The chain must include at\n least one intermediate cert.\n \"\"\")\n not_managed.add_argument(\n '--private-key',\n metavar='LOCAL_FILE_PATH',\n required=True,\n help=\"\"\"\\\n Path to a local private key file. The private key must be in PEM\n format and must use RSA or ECDSA encryption.\n \"\"\")\n else:\n parser.add_argument(\n '--certificate',\n required=True,\n metavar='LOCAL_FILE_PATH',\n help=\"\"\"\\\n Path to a local certificate file. The certificate must be in PEM\n format. The certificate chain must be no greater than 5 certs long. The\n chain must include at least one intermediate cert.\n \"\"\")\n\n parser.add_argument(\n '--private-key',\n required=True,\n metavar='LOCAL_FILE_PATH',\n help=\"\"\"\\\n Path to a local private key file. 
The private key must be in PEM\n format and must use RSA or ECDSA encryption.\n \"\"\")", "def CreateCsrFile(keyfile):\n csrfile = tempfile.mkstemp()[1]\n cmd = [\n 'openssl',\n 'req',\n '-new',\n '-key', keyfile,\n '-out', csrfile,\n '-subj', '/C=NA/ST=NA/L=NA/O=Chromium/OU=Test/CN=chromium.org'\n ]\n _RunCommand(cmd)\n return csrfile", "def run_sysca(argv):\n global QUIET\n\n ap = setup_args()\n args = ap.parse_args(argv)\n if args.quiet:\n QUIET = True\n if args.command == 'new-key':\n newkey_command(args)\n elif args.command == 'request':\n req_command(args)\n elif args.command == 'sign':\n sign_command(args)\n elif args.command == 'selfsign':\n selfsign_command(args)\n elif args.command == 'show':\n show_command(args)\n else:\n die(\"Unknown command: %s\", args.command)", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n certificate: Optional[pulumi.Input[str]] = None,\n certificate_id: Optional[pulumi.Input[str]] = None,\n certificate_name: Optional[pulumi.Input[str]] = None,\n domain: Optional[pulumi.Input[str]] = None,\n instance_id: Optional[pulumi.Input[str]] = None,\n private_key: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: DomainTrustArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def opensslCmsSignedDataCreate( conveyedInfoFile, cert, privateKey ):\n opensslCmdArgs = [ \"openssl\", \"cms\", \"-sign\", \"-in\", conveyedInfoFile,\n \"-signer\", cert,\n \"-inkey\", privateKey,\n \"-outform\", \"der\", \"-nodetach\" ]\n conveyedInfoCmsSignedDerBase64 = runOpensslCmd( opensslCmdArgs, [ \"base64\" ] )\n return conveyedInfoCmsSignedDerBase64", "def init_cmd(ctx: click.Context, create_certs: str):\r\n from pathlib import Path\r\n from .init_funcs import init\r\n\r\n init(Path(create_certs) if create_certs is not None else None, ctx.obj[\"root_path\"])", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n certificate_body: Optional[pulumi.Input[str]] = None,\n certificate_chain: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n name_prefix: Optional[pulumi.Input[str]] = None,\n path: Optional[pulumi.Input[str]] = None,\n private_key: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n __props__=None):\n ...", "def parse_command_line_args():\n parser = argparse.ArgumentParser(description=(\n 'HYAKUYOBAKO Data sender.'))\n parser.add_argument(\n '--project_id', required=True, help='GCP cloud project name')\n parser.add_argument(\n '--registry_id', required=True, help='Cloud IoT Core registry id')\n parser.add_argument(\n '--device_id', required=True, help='Cloud IoT Core device id')\n parser.add_argument(\n '--private_key_file',\n required=True,\n help='Path to private key file.')\n parser.add_argument(\n '--algorithm',\n choices=('RS256', 'ES256'),\n required=True,\n help='The encryption algorithm to use to generate the JWT.')\n parser.add_argument(\n '--cloud_region', default='us-central1', help='GCP cloud region')\n parser.add_argument(\n '--ca_certs',\n default='roots.pem',\n help=('CA root from https://pki.google.com/roots.pem'))\n parser.add_argument(\n '--message_type',\n choices=('event', 'state'),\n default='event',\n required=True,\n help=('Indicates whether the message to be published is a '\n 'telemetry event or a device state message.'))\n parser.add_argument(\n '--base_url',\n default=_BASE_URL,\n 
help=('Base URL for the Cloud IoT Core Device Service API'))\n parser.add_argument(\n '--jwt_expires_minutes',\n default=20,\n type=int,\n help=('Expiration time, in minutes, for JWT tokens.'))\n parser.add_argument(\n '--id',\n default=999,\n type=int,\n help=('Device id, not IoT Core device id for unique key.'))\n parser.add_argument(\n '--location_logitude',\n default=0.0,\n type=float,\n help=('Logitude of this deice. ex)35.658581'))\n parser.add_argument(\n '--location_latitude',\n default=0.0,\n type=float,\n help=('Latitude of this deice. ex)139.745433'))\n\n return parser.parse_args()", "def request(domain):\n if not domain:\n logger.error(\n \"ctl:info:generate\", \"Choose a fully-qualified domain name of the \"\n \"certificate. Must match a domain present on the system\"\n )\n domain = click.prompt(\"Domain name\")\n try:\n client().certificates.request_acme_certificate(domain)\n except Exception as e:\n raise CLIException(str(e))", "def create_selfsigned_certificates(name):\n pass", "def source(self, **subprocess_kwargs):\n cmd = [\"dpkg-buildpackage\", \"-S\", f\"--sign-key={self.sign_key}\"]\n self.log(f\"Building package: {cmd}\")\n cmd_ok(cmd, **subprocess_kwargs)", "def main(args):", "def main(args):", "def __init__(__self__, *,\n certificate: Optional[pulumi.Input[str]] = None,\n csr: Optional[pulumi.Input[str]] = None,\n expires_on: Optional[pulumi.Input[str]] = None,\n hostnames: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n min_days_for_renewal: Optional[pulumi.Input[int]] = None,\n request_type: Optional[pulumi.Input[str]] = None,\n requested_validity: Optional[pulumi.Input[int]] = None):\n if certificate is not None:\n pulumi.set(__self__, \"certificate\", certificate)\n if csr is not None:\n pulumi.set(__self__, \"csr\", csr)\n if expires_on is not None:\n pulumi.set(__self__, \"expires_on\", expires_on)\n if hostnames is not None:\n pulumi.set(__self__, \"hostnames\", hostnames)\n if min_days_for_renewal is not None:\n pulumi.set(__self__, \"min_days_for_renewal\", min_days_for_renewal)\n if request_type is not None:\n pulumi.set(__self__, \"request_type\", request_type)\n if requested_validity is not None:\n pulumi.set(__self__, \"requested_validity\", requested_validity)", "def init(args: Optional[List[bytes]] = None) -> None:\n warnings.warn(_deprecation_warning(), FutureWarning)\n parsed = {}\n if args:\n for arg in args:\n kv = arg.decode().split('=')\n if len(kv) == 2:\n parsed[kv[0]] = kv[1]\n collective.init(**parsed)", "def init(argv, doc, filename, parents=None):\n service = None\n flags = None\n parent_parsers = [tools.argparser, argparser]\n if parents is not None:\n parent_parsers.extend(parents)\n\n parser = argparse.ArgumentParser(\n description=doc,\n formatter_class=argparse.RawDescriptionHelpFormatter,\n parents=parent_parsers)\n flags = parser.parse_args(argv[1:])\n\n auth_path = os.path.dirname(filename)\n client_secrets_path = os.path.join(auth_path, CLIENT_SECRETS_FILE)\n service_account_path = os.path.join(auth_path, SERVICE_ACCOUNT_FILE)\n\n credentials = None\n if os.path.isfile(service_account_path):\n credentials = ServiceAccountCredentials.from_json_keyfile_name(\n service_account_path,\n scopes=API_SCOPE)\n elif os.path.isfile(client_secrets_path):\n message = tools.message_if_missing(client_secrets_path)\n flow = client.flow_from_clientsecrets(client_secrets_path,\n scope=API_SCOPE,\n message=message)\n storage_path = os.path.join(auth_path, SERVICE_NAME + '.dat')\n storage = Storage(storage_path)\n credentials = 
storage.get()\n if credentials is None or credentials.invalid:\n credentials = tools.run_flow(flow, storage, flags)\n else:\n print('No OAuth2 authentication files found. Checked:', file=sys.stderr)\n print('- %s' % service_account_path, file=sys.stderr)\n print('- %s' % client_secrets_path, file=sys.stderr)\n print('Please read the accompanying documentation.', file=sys.stderr)\n sys.exit(1)\n\n http = credentials.authorize(http=httplib2.Http())\n service = discovery.build(SERVICE_NAME, SERVICE_VERSION, http=http)\n return (service, flags)", "def main():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"URI\")\n args = parser.parse_args()\n smart_client(args.URI)", "def createCertRequest(pkey, digest=\"sha256\", **name):\n req = crypto.X509Req()\n subj = req.get_subject()\n\n for key, value in name.items():\n setattr(subj, key, value)\n\n req.set_pubkey(pkey)\n req.sign(pkey, digest)\n return req", "def create_environment(args):\n env.username = args.user\n env.password = args.password\n env.service_url = args.service_url\n env.quiet = args.quiet\n env.verbose = args.verbose\n env.manifest = args.manifest\n env.debug = args.debug\n env.always_confirm = args.yes\n env.args = args\n env.api = ravello.RavelloClient(env.username, env.password, env.service_url)", "def _create_certificate_chain():\n caext = X509Extension(b\"basicConstraints\", False, b\"CA:true\")\n not_after_date = datetime.date.today() + datetime.timedelta(days=365)\n not_after = not_after_date.strftime(\"%Y%m%d%H%M%SZ\").encode(\"ascii\")\n\n # Step 1\n cakey = PKey()\n cakey.generate_key(TYPE_RSA, 2048)\n cacert = X509()\n cacert.set_version(2)\n cacert.get_subject().commonName = \"Authority Certificate\"\n cacert.set_issuer(cacert.get_subject())\n cacert.set_pubkey(cakey)\n cacert.set_notBefore(b\"20000101000000Z\")\n cacert.set_notAfter(not_after)\n cacert.add_extensions([caext])\n cacert.set_serial_number(0)\n cacert.sign(cakey, \"sha256\")\n\n # Step 2\n ikey = PKey()\n ikey.generate_key(TYPE_RSA, 2048)\n icert = X509()\n icert.set_version(2)\n icert.get_subject().commonName = \"Intermediate Certificate\"\n icert.set_issuer(cacert.get_subject())\n icert.set_pubkey(ikey)\n icert.set_notBefore(b\"20000101000000Z\")\n icert.set_notAfter(not_after)\n icert.add_extensions([caext])\n icert.set_serial_number(0)\n icert.sign(cakey, \"sha256\")\n\n # Step 3\n skey = PKey()\n skey.generate_key(TYPE_RSA, 2048)\n scert = X509()\n scert.set_version(2)\n scert.get_subject().commonName = \"Server Certificate\"\n scert.set_issuer(icert.get_subject())\n scert.set_pubkey(skey)\n scert.set_notBefore(b\"20000101000000Z\")\n scert.set_notAfter(not_after)\n scert.add_extensions(\n [X509Extension(b\"basicConstraints\", True, b\"CA:false\")]\n )\n scert.set_serial_number(0)\n scert.sign(ikey, \"sha256\")\n\n return [(cakey, cacert), (ikey, icert), (skey, scert)]", "def _generate_csr_and_key():\n key = rsa.generate_private_key(\n public_exponent=65537,\n key_size=2048,\n backend=default_backend())\n\n csr = x509.CertificateSigningRequestBuilder().subject_name(x509.Name([\n x509.NameAttribute(NameOID.COMMON_NAME, u\"Magnum User\"),\n ])).sign(key, hashes.SHA256(), default_backend())\n\n result = {\n 'csr': csr.public_bytes(\n encoding=serialization.Encoding.PEM).decode(\"utf-8\"),\n 'key': key.private_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PrivateFormat.TraditionalOpenSSL,\n encryption_algorithm=serialization.NoEncryption()).decode(\"utf-8\"),\n }\n\n return result", "def __check_opts(self):\n 
self.ca_cert_file = os.environ['HOME'] + '/.cat_installer/ca.pem'\n self.pfx_file = os.environ['HOME'] + '/.cat_installer/user.p12'\n if not os.path.isfile(self.ca_cert_file):\n print(Messages.cert_error)\n sys.exit(2)", "def cli() -> object:\n parser = argparse.ArgumentParser(description=\"Expression Compiler\")\n parser.add_argument(\"sourcefile\", type=argparse.FileType('r'),\n help=\"Source program text\")\n parser.add_argument(\"outfile\", type=argparse.FileType('w'),\n nargs=\"?\", default=sys.stdout,\n help=\"Output file for assembly code\")\n args = parser.parse_args()\n return args", "def _get_request(args):\n input_request = args.input_request\n request = None\n if input_request:\n from pathlib import Path\n req_file = Path(input_request)\n if req_file.is_file():\n request = load_certificate_request(req_file)\n\n if not request:\n request = req_handler(args)\n\n return request", "def setup_request_commandline():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"mode\",\n choices=(\"pokemon\", \"ability\", \"move\"))\n\n input_group = parser.add_mutually_exclusive_group(required=True)\n input_group.required = True\n input_group.add_argument(\"--inputfile\")\n input_group.add_argument(\"--inputdata\")\n\n parser.add_argument(\"--expanded\",\n action=\"store_true\")\n\n parser.add_argument(\"--output\")\n\n try:\n args = parser.parse_args()\n request = Request()\n request.mode = args.mode\n request.is_input_file = True if args.inputdata is None else False\n request.input_content = args.inputfile if args.inputdata is None else args.inputdata\n request.expanded = args.expanded\n request.output = args.output\n return request\n except Exception as e:\n print(f\"Error! Could not read arguments.\\n{e}\")\n quit()", "def initialize():\n\n parser = argparse.ArgumentParser(\n description='This function takes a gene count file, a gene name, and \\\n an output file as parameters, and creates a file with the \\\n sample IDs and counts for that gene.')\n parser.add_argument('-i',\n '--data',\n type=str,\n help='The file name of the dataset.',\n required=True)\n parser.add_argument('-g',\n '--gene',\n type=str,\n help='The name of the target gene.',\n required=True)\n parser.add_argument('-o',\n '--output',\n type=str,\n help='The file name of the output file.',\n required=True)\n\n args_parse = parser.parse_args()\n\n return args_parse", "def main():\n parser = argparse.ArgumentParser(description=(\n 'Canonicalize Call Graphs to FASTEN Canonical Call Graphs'))\n parser.add_argument('directory', help=(\n 'a directory with the Call Graph, and description files'))\n parser.add_argument('-f', '--forge', default='debian', help=(\n 'forge of the analyzed project. 
For example, it could be debian, '\n 'or GitHub'))\n parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',\n help='print logs to the console')\n parser.add_argument('-L', '--file-logging', dest='file_logging',\n action='store_true',\n help='save logs to a file')\n parser.add_argument('-l', '--logging-level', dest='logging_level',\n choices=['CRITICAL', 'ERROR', 'WARNING', 'INFO',\n 'DEBUG'],\n default='DEBUG', help='logging level for logs')\n parser.add_argument('-c', '--custom-deps', dest='custom_deps',\n default=None, help='custom user defined dependencies')\n parser.add_argument('-r', '--regex-product', dest='regex_product',\n default=None, help='regex to match product\\'s files')\n parser.add_argument('-o', '--output', dest='output', default=None,\n help='file to save the canonicalized call graph')\n args = parser.parse_args()\n can = C_Canonicalizer(args.directory,\n forge=args.forge,\n console_logging=args.verbose,\n file_logging=args.file_logging,\n logging_level=args.logging_level,\n custom_deps=args.custom_deps,\n product_regex=args.regex_product)\n can.canonicalize()", "def main(args):\n cli = CLI()\n # Check arguments\n cli.parse_arguments(args)", "def sign_certificate_request(csr, rootkey, rootcrt, client_key, domain_name, notBefore, notAfter):\n\n serial_number = int(str(uuid.uuid4().int)[:20])\n crt = x509.CertificateBuilder().subject_name(\n csr.subject\n ).issuer_name(\n rootcrt.subject\n ).public_key(\n csr.public_key()\n ).serial_number(\n serial_number # pylint: disable=no-member\n ).not_valid_before(\n notBefore\n ).not_valid_after(\n notAfter\n ).add_extension(\n extension=x509.KeyUsage(\n digital_signature=True, key_encipherment=True, content_commitment=True,\n data_encipherment=False, key_agreement=False, encipher_only=False, decipher_only=False, key_cert_sign=False, crl_sign=False\n ),\n critical=True\n ).add_extension(\n extension=x509.BasicConstraints(ca=False, path_length=None),\n critical=True\n ).add_extension(\n extension=x509.AuthorityKeyIdentifier.from_issuer_public_key(rootkey.public_key()),\n critical=False\n ).add_extension(\n csr.extensions.get_extension_for_oid(ExtensionOID.SUBJECT_ALTERNATIVE_NAME).value,\n critical=False,\n ).sign(\n private_key=rootkey,\n algorithm=hashes.SHA256(),\n backend=default_backend()\n )\n\n ##storing client's .crt\n with open(domain_name + \".crt\", 'wb') as f:\n f.write(crt.public_bytes(encoding=serialization.Encoding.PEM))", "def main(args=None):", "def main(args=None):", "def sign (self):\n print(\"*** signing the inno setup installer ***\")\n pfxfile = r'scripts\\%s.pfx' % self.lname\n if os.path.isfile(pfxfile):\n path = get_windows_sdk_path()\n signtool = os.path.join(path, \"bin\", \"signtool.exe\")\n if os.path.isfile(signtool):\n cmd = [signtool, 'sign', '/f', pfxfile, self.distfile]\n subprocess.check_call(cmd)\n else:\n print(\"No signed installer: signtool.exe not found.\")\n else:\n print(\"No signed installer: certificate %s not found.\" % pfxfile)", "def do_genconfig(args):\n\n print(\"========= DEFAULT ========\")\n debug = utils.get_input(\n \"Enable agent in debug mode [y/N]: \") or 'n'\n retry_interval = utils.get_input(\n \"Type the polling interval in seconds for daemon to manage the nodes: \")\n batch_publishing_interval = utils.get_input(\n \"Type the publishing interval in seconds for daemon to push the metrics: \")\n refresh_interval = utils.get_input(\n \"Type the polling interval in seconds to get health status directly from OneView: \")\n scmb_certificate_dir = 
utils.get_input(\n \"Type the certificates directory to register in OneView SCMB [/var/run/oneview-monasca]: \")\n auth_retry_limit = utils.get_input(\n \"Type the maximum number of attempts to try authenticate in REST API: \")\n\n debug = 'false' if debug == 'n' else 'true'\n retry_interval = retry_interval if retry_interval else \"300\"\n refresh_interval = refresh_interval if refresh_interval else \"180\"\n batch_publishing_interval = batch_publishing_interval if batch_publishing_interval else \"60\"\n\n auth_retry_limit = auth_retry_limit if auth_retry_limit else \"5\"\n scmb_certificate_dir = scmb_certificate_dir if scmb_certificate_dir else \"/var/run/oneview-monasca\"\n\n scmb_certificate_dir = os.path.realpath(os.path.expanduser(scmb_certificate_dir))\n utils.makedirs(scmb_certificate_dir)\n\n print(\"========= Openstack =========\")\n auth_url = utils.get_input(\"Type the Keystone url for authentication: \")\n auth_user = utils.get_input(\"Type the name of your OpenStack user: \")\n auth_password = getpass.getpass(\"Type the password for your OpenStack user: \")\n auth_tenant_name = utils.get_input(\"Type the tenant name that the OpenStack user will be authenticated: \")\n monasca_api_version = utils.get_input(\"Type a version of Monasca API that you want to use [2_0]: \")\n\n monasca_api_version = monasca_api_version if monasca_api_version else \"2_0\"\n\n print(\"========= OneView =========\")\n oneview_manager_url = utils.get_input(\"Type the manager_url for the OneView services: \")\n oneview_username = utils.get_input(\"Type your OneView username: \")\n oneview_password = getpass.getpass(\"Type your OneView user's password: \")\n oneview_insecure = utils.get_input(\"Would you like to allow insecure connections to OneView? [Y/n]: \") or \"Y\"\n max_polling_attempts = utils.get_input(\"Max polling attempts OneView requests: \")\n tls_cacert_file = utils.get_input(\"Path to your CA OneView certificate file, if any: \")\n\n oneview_host = utils.extract_domain_from_service_url(oneview_manager_url)\n oneview_insecure = \"true\" if oneview_insecure.lower() == 'y' else \"false\"\n max_polling_attempts = max_polling_attempts if max_polling_attempts else \"15\"\n\n fault_tolerance_enable = False\n group_name = coordinator_url = None\n while True:\n create = utils.get_input(\"Would you like to enable fault tolerance in the agent? 
[Y/n] \") or 'y'\n\n if create.lower() == 'y':\n print(\"========= Tooz =========\")\n\n group_name = utils.get_input(\"The group name for tooz configuration: \")\n coordinator_url = utils.get_input(\"The coordinator url for tooz configuration: \")\n fault_tolerance_enable = True\n break\n elif create.lower() == 'n':\n break\n else:\n print(\"Invalid option.\\n\")\n\n config_drivers = {}\n try:\n names = utils.list_names_driver(const.NAMESPACE_DISCOVERY_NODES, log=False)\n except Exception as ex:\n print('\\nCannot load installed drivers - Error caused by %s\\n' % str(ex))\n names = []\n\n for name in names:\n try:\n conf = utils.load_class_by_alias(\n const.NAMESPACE_DISCOVERY_NODES, name, log=False).genconfig()\n\n config_drivers[name.split('_')[-1]] = conf\n except Exception as ex:\n print('\\nCannot generating config file session to driver: %s - Error caused by %s\\n' % (name, str(ex)))\n\n # Write Configuration file #\n config = ConfigParser()\n config.set(\"DEFAULT\", \"debug\", debug)\n config.set(\"DEFAULT\", \"retry_interval\", retry_interval)\n config.set(\"DEFAULT\", \"periodic_refresh_interval\", refresh_interval)\n config.set(\"DEFAULT\", \"batch_publishing_interval\", batch_publishing_interval)\n\n config.set(\"DEFAULT\", \"auth_retry_limit\", auth_retry_limit)\n config.set(\"DEFAULT\", \"scmb_certificate_dir\", scmb_certificate_dir)\n\n if fault_tolerance_enable:\n config.add_section(\"tooz\")\n config.set(\"tooz\", \"group_name\", group_name)\n config.set(\"tooz\", \"coordinator_url\", coordinator_url)\n\n config.add_section(\"openstack\")\n config.set(\"openstack\", \"auth_url\", auth_url)\n config.set(\"openstack\", \"auth_user\", auth_user)\n config.set(\"openstack\", \"auth_password\", auth_password)\n config.set(\"openstack\", \"auth_tenant_name\", auth_tenant_name)\n config.set(\"openstack\", \"monasca_api_version\", monasca_api_version)\n\n config.add_section(\"oneview\")\n config.set(\"oneview\", \"host\", oneview_host)\n config.set(\"oneview\", \"manager_url\", oneview_manager_url)\n config.set(\"oneview\", \"username\", oneview_username)\n config.set(\"oneview\", \"password\", oneview_password)\n config.set(\"oneview\", \"allow_insecure_connections\", oneview_insecure)\n config.set(\"oneview\", \"max_polling_attempts\", max_polling_attempts)\n config.set(\"oneview\", \"tls_cacert_file\", tls_cacert_file)\n\n for driver in config_drivers:\n config.add_section(driver)\n for option, value in config_drivers[driver].items():\n config.set(driver, option, value)\n\n if not args.config_file:\n args.config_file = '~' + os.path.sep + 'oneview_monasca.conf'\n\n filename = utils.get_input(\n \"Type the path of the new configuration file [%s]: \" % args.config_file) or args.config_file\n full_filename = os.path.realpath(os.path.expanduser(filename))\n\n config_dir = os.path.dirname(full_filename)\n utils.makedirs(config_dir)\n\n with open(full_filename, 'w') as configfile:\n config.write(configfile)\n print(\"======\\nFile created successfully on '%s'!\\n======\" % filename)", "def main():\n logger.info(\"Running main function...\")\n args = get_args()\n # rose suite default location\n if args.suite_dir:\n default_suite = args.suite_dir\n rose_config_template = os.path.join(default_suite, \"rose-suite.conf\")\n\n # get command line arguments\n recipe_files = args.recipe_files\n config_file = args.config_file\n main_dir = args.main_dir\n log_level = args.log_level\n\n # setup rose suite\n run_rose = _setup_work(rose_config_template, recipe_files,\n config_file, main_dir, 
default_suite, log_level)\n\n # submit to cylc\n if not args.no_submit:\n _run_suite(run_rose)", "def make_cert_for_spki_request(spki_req_b64, serial, ident):\n spki_obj = netscape_spki_from_b64(spki_req_b64)\n if spki_obj is None:\n raise ValueError('Invalid SPKI object')\n\n root_crt = _try_load_ca_cert(cfg.ca_cert_path())\n root_key = _try_load_ca_private_key(cfg.ca_private_key_path())\n crt = _make_base_cert(spki_obj.get_pubkey(), 365, ident, serial)\n crt.set_issuer(root_crt.get_subject())\n crt.sign(root_key, 'sha256')\n return crypto.dump_certificate(crypto.FILETYPE_ASN1, crt)", "def _new_runtime_credentials(self, force=False, readonly=False):\n _log.debug(\"new_runtime_credentials\")\n #Create keys and certificate request\n private_key = os.path.join(self.runtime_dir, \"private\", \"private.key\")\n private = os.path.dirname(private_key)\n _log.debug(\"new_runtime: %s\" % self.runtime_dir)\n out = os.path.join(self.runtime_dir, \"{}.csr\".format(self.node_name))\n _log.debug(\"out dir: %s\"% out)\n # Create ECC-based certificate\n log = subprocess.Popen([\"openssl\", \"ecparam\", \"-genkey\",\n \"-name\", \"prime256v1\",\n \"-out\", private_key],\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n stdout, stderr = log.communicate()\n if log.returncode != 0:\n raise IOError(stderr)\n\n log = subprocess.Popen([\"openssl\", \"req\", \"-new\",\n \"-config\",self.configfile,\n # \"-subj\", subject,\n \"-key\", private_key,\n \"-nodes\",\n \"-utf8\",\n \"-out\", out],\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n stdout, stderr = log.communicate()\n if log.returncode != 0:\n raise IOError(stderr)\n\n return out", "def AddClientCertificateFlag(parser, required=False):\n help_text = \"\"\"\\\n x509 PEM-encoded certificate that will be used by the replica to\n authenticate against the database server. 
Database Migration Service\n encrypts the value when storing it.\n \"\"\"\n parser.add_argument('--client-certificate', help=help_text, required=required)", "def run(self, line):\r\n if os.name == 'nt':\r\n if not ctypes.windll.shell32.IsUserAnAdmin() != 0:\r\n self.app.typepath.adminpriv = False\r\n elif not os.getuid() == 0:\r\n self.app.typepath.adminpriv = False\r\n\r\n nargv = []\r\n curr = []\r\n argfound = False\r\n\r\n if \"--version\" in line or \"-V\" in line:\r\n sys.stdout.write(\"\"\"%(progname)s %(version)s\\n\"\"\" % \\\r\n {'progname': versioning.__longname__, 'version': \\\r\n versioning.__version__})\r\n sys.stdout.flush()\r\n sys.exit(self.retcode)\r\n\r\n else:\r\n for argument in enumerate(line):\r\n if not argfound and not argument[1].startswith('-'):\r\n nargv = line[argument[0]:]\r\n break\r\n else:\r\n argfound = False\r\n\r\n if argument[1] == \"-c\":\r\n argfound = True\r\n\r\n curr.append(argument[1])\r\n\r\n (self.opts, _) = self.parser.parse_args(curr)\r\n\r\n try:\r\n Encryption.encode_credentials('test')\r\n self.app.set_encode_funct(Encryption.encode_credentials)\r\n self.app.set_decode_funct(Encryption.decode_credentials)\r\n self.encoding = True\r\n except redfish.hpilo.risblobstore2.ChifDllMissingError:\r\n self.encoding = False\r\n\r\n if self.opts.config is not None and len(self.opts.config) > 0:\r\n if not os.path.isfile(self.opts.config):\r\n self.retcode = ReturnCodes.CONFIGURATION_FILE_ERROR\r\n sys.exit(self.retcode)\r\n\r\n self.app.config_file = self.opts.config\r\n\r\n self.app.config_from_file(self.app.config_file)\r\n if self.opts.logdir and self.opts.debug:\r\n logdir = self.opts.logdir\r\n else:\r\n logdir = self.app.config.get_logdir()\r\n\r\n if logdir and self.opts.debug:\r\n try:\r\n os.makedirs(logdir)\r\n except OSError as ex:\r\n if ex.errno == errno.EEXIST:\r\n pass\r\n else:\r\n raise\r\n\r\n if self.opts.debug:\r\n logfile = os.path.join(logdir, versioning.__shortname__+'.log')\r\n\r\n # Create a file logger since we got a logdir\r\n lfile = logging.FileHandler(filename=logfile)\r\n formatter = logging.Formatter(\"%(asctime)s %(levelname)s\\t: \" \\\r\n \"%(message)s\")\r\n\r\n lfile.setFormatter(formatter)\r\n lfile.setLevel(logging.DEBUG)\r\n LOGGER.addHandler(lfile)\r\n self.app.LOGGER = LOGGER\r\n\r\n cachedir = None\r\n if self.opts.nocache:\r\n self.app.config.set_cache(False)\r\n else:\r\n self.app.config.set_cachedir(os.path.join(self.opts.config_dir, \\\r\n 'cache'))\r\n cachedir = self.app.config.get_cachedir()\r\n\r\n if cachedir:\r\n try:\r\n os.makedirs(cachedir)\r\n except OSError as ex:\r\n if ex.errno == errno.EEXIST:\r\n pass\r\n else:\r\n raise\r\n\r\n if (\"login\" in line or any(x.startswith(\"--url\") for x in line) or not line)\\\r\n and not (any(x.startswith((\"-h\", \"--h\")) for x in nargv) or \"help\" in line):\r\n self.app.logout()\r\n else:\r\n self.app.restore()\r\n self.opts.is_redfish = self.app.updatedefinesflag(redfishflag=\\\r\n self.opts.is_redfish)\r\n\r\n if nargv:\r\n try:\r\n self.retcode = self._run_command(self.opts, nargv)\r\n if self.app.config.get_cache():\r\n if (\"logout\" not in line) and (\"--logout\" not in line):\r\n self.app.save()\r\n else:\r\n self.app.logout()\r\n except Exception as excp:\r\n self.handle_exceptions(excp)\r\n\r\n return self.retcode\r\n else:\r\n self.cmdloop(self.opts)\r\n\r\n if self.app.config.get_cache():\r\n self.app.save()\r\n else:\r\n self.app.logout()", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n 
certificate_id: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def handle_cmdline_args():\n\n parser = argparse.ArgumentParser(\n description='Generate synthetic data from a specification in a json '\n 'file using the \"synth-method\" described in the json file. ')\n\n parser.add_argument(\n '-i', dest='infile', required=True,\n help='The input json file. Must contain a \"synth-method\" property')\n\n parser.add_argument(\n '-o', dest='outfile_prefix', required=True, help='The prefix of the output paths (data json and csv), relative to the QUIPP-pipeline root directory')\n\n args = parser.parse_args()\n return args", "def fusion_api_generate_certificate_signing_request(self, body, api=None, headers=None):\n return self.wsc.post(body, api=api, headers=headers)", "def __init__(self, args:argparse.Namespace):\n\t\tglobal DISTRO\n\n\t\tself.dispersion = args.dispersion if args.dispersion > 0 else 0\n\t\tself.login_dispersion = args.login_dispersion if args.login_dispersion > 0 else 0\n\t\tself.wait_for_parents = bool(args.wait_for_parents)\n\t\tself.retries = args.retries if args.retries > 0 else 0\n\t\tself.rev_proxy_disable = args.rev_proxy_disable\n\t\tself.verify = not args.insecure\n\n\t\tsetLogLevel(args.log_level)\n\n\t\tlogging.info(\"Distribution detected as: '%s'\", DISTRO)\n\n\t\tself.hostname = (platform.node().split('.')[0], platform.node())\n\t\tlogging.info(\"Hostname detected as: '%s'\", self.fullHostname)\n\n\t\ttry:\n\t\t\tself.mode = Configuration.Modes[args.Mode.upper()]\n\t\texcept KeyError as e:\n\t\t\traise ValueError(\"Unrecognized Mode: '%s'\" % args.Mode)\n\n\t\tself.tsroot = parseTSRoot(args.ts_root)\n\t\tlogging.info(\"ATS root installation directory set to '%s'\", self.tsroot)\n\n\t\tself.useSSL, self.toHost, self.toPort = parseTOURL(args.to_url, self.verify)\n\t\tself.username, self.password = args.to_user, args.to_password", "def main() -> None:\n init(args=sys.argv[1:])", "def make( self, arguments = None ):\n\n # no arguments given, assume the command-line arguments are needed\n if arguments is None:\n arguments = sys.argv\n\n # set up a compiler argument handler\n parser = Cargs()\n\n # configure the argument parser\n for key, patt, conf in config[ 'compiler' ]:\n parser.add_argument( key, patt, **conf )\n\n # load the argument list\n args = parser.load()\n\n # write input hashes to output file\n with open( args.output, 'w' ) as ofh:\n for inp in args.input:\n with open( inp, 'r' ) as ifh:\n ofh.write(\n '{} {}\\n'.format(\n hashlib.sha1( ifh.read() ).hexdigest(),\n inp\n )\n )\n\n # report what was done (ZIH - could use some more detail/results)\n event = {\n 'arguments' : arguments\n }\n self._out.write( '{}\\n'.format( json.dumps( event, indent = 4 ) ) )\n\n # return success\n return 0", "def cli(args): # noqa; pylint: disable=unused-argument", "def init_args():\n parser = argparse.ArgumentParser(\n description=\"DeltaSherlock Client software.\")\n parser.add_argument('-v', '--version', action='version', version=VERSION)\n parser.add_argument('-c', '--config', action='store', dest='config_file',\n default='./config.ini', help=\"Path to config file. [default: \\\n %(default)s]\")\n parser.add_argument('-d', '--daemon', action='store_true', dest='daemon',\n default=False, help=\"Run in daemon mode. 
[default: \\\n %(default)s]\")\n return parser.parse_args()", "def main(argv):\n\n # boot\n args = parse_args(argv)\n pynexus.config.set(api_endpoint = args.config['api_endpoint'],\n auth_user = args.config['user'],\n auth_pass = args.config['pass'])\n\n\n # get report request\n report = json.load(open(args.report_request, 'r'))\n\n\n # inject time params\n time_params = {'start' : 'start_date',\n 'end' : 'end_date',\n 'interval': 'report_interval'}\n\n for arg, param in time_params.items():\n report['report'][param] = getattr(args, arg)\n\n\n # prompt user for confirmation\n if not args.no_interaction:\n prompt_user_for_confirmation(report)\n\n\n # determine report path\n report_dir = os.path.join(args.save_dir, report['report']['report_type'])\n\n if not os.path.exists(report_dir):\n os.makedirs(report_dir)\n\n report_file = os.path.join(\n report_dir,\n datetime.datetime.now().strftime('%Y%m%d_%H%M.csv')\n )\n\n # fetch and save report\n pynexus.report.get(report, report_file, debug=args.debug)\n\n\n ##\n print os.path.realpath(report_file)", "def create_pki():\n os.mkdir(pki_dir)\n os.mkdir(f'{pki_dir}/newcerts')\n Path(f'{pki_dir}/index.txt').touch()\n with open(f'{pki_dir}/serial', 'w') as serial_file:\n serial_file.write('00000000')\n serial_file.close()\n create_CA('/CN=My cool CA/O=Honest Achmed/OU=Used Cars/C=EU')", "def main():\n # This is used to store the certificate filename\n cert = \"\"\n\n # Setup a signal handler to catch control-c and clean up the cert temp file\n # No way to catch sigkill so try not to do that.\n # noinspection PyUnusedLocal\n def sigint_handler(sig, frame): # pylint:disable=unused-argument\n \"\"\"Handle interrupt signals.\"\"\"\n if not args.cert:\n try:\n os.unlink(cert)\n except OSError: # pylint:disable=pointless-except\n pass\n print \"Exiting...\"\n sys.exit(0)\n\n parser = ArgumentParser('Remote APIC API Inspector and GUI Log Server')\n\n parser.add_argument('-a', '--apicip', required=False, default='8.8.8.8',\n help='If you have a multihomed system, where the ' +\n 'apic is on a private network, the server will ' +\n 'print the ip address your local system has a ' +\n 'route to 8.8.8.8. 
If you want the server to ' +\n 'print a more accurate ip address for the ' +\n 'server you can tell it the apicip address.')\n\n parser.add_argument('-c', '--cert', type=str, required=False,\n help='The server certificate file for ssl ' +\n 'connections, default=\"server.pem\"')\n\n parser.add_argument('-d', '--delete_imdata', action='store_true',\n default=False, required=False,\n help='Strip the imdata from the response and payload')\n\n parser.add_argument('-e', '--exclude', action='append', nargs='*',\n default=[], choices=['subscriptionRefresh',\n 'aaaRefresh',\n 'aaaLogout',\n 'HDfabricOverallHealth5min-0',\n 'topInfo', 'all'],\n help='Exclude certain types of common noise queries.')\n\n parser.add_argument('-i', '--indent', type=int, default=2, required=False,\n help='The number of spaces to indent when pretty ' +\n 'printing')\n\n parser.add_argument('-l', '--location', default='/apiinspector',\n required=False,\n help='Location that transaction logs are being ' +\n 'sent to, default=/apiinspector')\n\n parser.add_argument('-n', '--nice-output', action='store_true',\n default=False, required=False,\n help='Pretty print the response and payload')\n\n parser.add_argument('-p', '--port', type=int, required=False, default=8987,\n help='Local port to listen on, default=8987')\n\n parser.add_argument('-s', '--sslport', type=int, required=False,\n default=8443,\n help='Local port to listen on for ssl connections, ' +\n 'default=8443')\n\n parser.add_argument('-r', '--requests-log', action='store_true',\n default=False, required=False,\n help='Log server requests and response codes to ' +\n 'standard error')\n\n parser.add_argument('-t', '--title', default='SimpleAciUiLogServer',\n required=False,\n help='Change the name shown for this application ' +\n 'when accessed with a GET request')\n\n parser.add_argument('-ty', '--type', action='append', nargs='*',\n default=['all'], choices=['POST', 'GET', 'undefined',\n 'EventChannelMessage'],\n help='Limit logs to specific request types.')\n\n args = parser.parse_args()\n\n logging.basicConfig(level=logging.DEBUG,\n format='%(asctime)s %(levelname)s - \\n%(message)s')\n if args.exclude:\n # Flatten the list\n args.exclude = [val for sublist in args.exclude for val in sublist]\n\n if not args.location.startswith(\"/\"):\n args.location = \"/\" + str(args.location)\n\n if args.type:\n # Flatten the list\n args.type = [val for sublist in args.type for val in sublist]\n\n ThreadingSimpleAciUiLogServer.prettyprint = args.nice_output\n ThreadingSimpleAciUiLogServer.indent = args.indent\n ThreadingSimpleAciUiLogServer.strip_imdata = args.delete_imdata\n\n # Instantiate a http server\n http_server = ThreadingSimpleAciUiLogServer((\"\", args.port),\n log_requests=args.requests_log,\n location=args.location,\n excludes=args.exclude,\n app_name=args.title)\n\n if not args.cert:\n # Workaround ssl wrap socket not taking a file like object\n cert_file = tempfile.NamedTemporaryFile(delete=False)\n cert_file.write(SERVER_CERT)\n cert_file.close()\n cert = cert_file.name\n print(\"\\n+++WARNING+++ Using an embedded self-signed certificate for \" +\n \"HTTPS, this is not secure.\\n\")\n else:\n cert = args.cert\n\n # Instantiate a https server as well\n https_server = ThreadingSimpleAciUiLogServer((\"\", args.sslport),\n cert=cert,\n location=args.location,\n log_requests=args.requests_log,\n excludes=args.exclude,\n app_name=args.title)\n\n signal.signal(signal.SIGINT, sigint_handler) # Or whatever signal\n\n # Example of registering a function for a specific 
method. The function\n    # needs to exist of course. Note: undefined seems to be the same as a\n    # GET but the logging facility on the APIC seems to get in a state where\n    # instead of setting the method properly it sets it to undefined.\n    # These registered functions could then be used to take specific actions or\n    # be silent for specific methods.\n    # http_server.register_function(GET)\n    # http_server.register_function(POST)\n    # http_server.register_function(HEAD)\n    # http_server.register_function(DELETE)\n    # http_server.register_function(undefined)\n    # http_server.register_function(EventChannelMessage)\n\n    # This simply sets up a socket for UDP which has a small trick to it.\n    # It won't send any packets out that socket, but this will allow us to\n    # easily and quickly interrogate the socket to get the source IP address\n    # used to connect to this subnet which we can then print out to make for\n    # an easy copy/paste in the APIC UI.\n    ip_add = [(s.connect((args.apicip, 80)), s.getsockname()[0], s.close()) for\n              s in [socket.socket(socket.AF_INET, socket.SOCK_DGRAM)]][0][1]\n\n    print(\"Servers are running and reachable via:\\n\")\n    print(\"http://\" + str(ip_add) + \":\" + str(args.port) + args.location)\n    print(\"https://\" + str(ip_add) + \":\" + str(args.sslport) + args.location +\n          \"\\n\")\n    print(\"Make sure your APIC(s) are configured to send log messages: \" +\n          \"welcome username -> Start Remote Logging\")\n    print(\"Note: If you connect to your APIC via HTTPS, configure the \" +\n          \"remote logging to use the https server.\")\n    serve_forever([http_server, https_server])", "def __init__(__self__,\n                 resource_name: str,\n                 opts: Optional[pulumi.ResourceOptions] = None,\n                 certificate: Optional[pulumi.Input[str]] = None,\n                 description: Optional[pulumi.Input[str]] = None,\n                 name: Optional[pulumi.Input[str]] = None,\n                 name_prefix: Optional[pulumi.Input[str]] = None,\n                 private_key: Optional[pulumi.Input[str]] = None,\n                 project: Optional[pulumi.Input[str]] = None,\n                 __props__=None):\n        ...", "def main(argv):\n    # -- load our run database and make it global --\n    global crysDB\n    with open(\"crysDB.json\") as f:\n        crysDB = json.load(f)\n\n    # -- parse args --\n    par = argparse.ArgumentParser(description=\"coherent crystal characterization suite\")\n    arg = par.add_argument\n    arg(\"-c\", \"--crys\", type=str, help=\"set crystal S/N\")\n    arg(\"-p\", \"--proc\", type=str, help=\"process a crystal\")\n    arg(\"-t\", \"--temp\", type=str, help='start temperature data taking')\n    arg(\"-pt\", \"--printtemp\", type=str, help='print current temperature')\n    arg(\"-a\", \"--all\", action=\"store_true\", help=\"process all crystals in the DB\")\n    arg(\"-o\", \"--over\", action=\"store_true\", help=\"overwrite existing files\")\n    arg(\"-z\", \"--zip\", action=\"store_true\", help='run gzip on raw files (on cenpa-rocks)')\n    arg(\"-s\", \"--sync\", action=\"store_true\", help='sync DAQ with cenpa-rocks')\n    args = vars(par.parse_args())\n\n    # -- set parameters --\n    crys_sn, overwrite = None, False\n\n    if args[\"crys\"]:\n        crys_sn = args[\"crys\"]\n\n    if args[\"over\"]:\n        overwrite = args[\"over\"]\n\n    # -- run analysis --\n    if args[\"proc\"]:\n        sn = args[\"proc\"]\n        process_crystal(sn, overwrite)\n\n    if args[\"all\"]:\n        all_sns = [k for k in crysDB if \"SN\" in k]\n        for sn in all_sns:\n            process_crystal(sn, overwrite)\n\n    if args[\"sync\"]:\n        sync_data()\n\n    if args[\"zip\"]:\n        # clean_gzip()\n        zip_data(overwrite)\n\n    if args[\"temp\"]:\n        \"\"\"\n        Run number should be the first run number entry (for 600V) in the 
ELOG.\n \"\"\"\n run_num = args[\"temp\"]\n measure_temp(run_num)\n\n if args[\"printtemp\"]:\n print_temp()", "def generate_key_and_cert():\n signing_key = rsa.generate_private_key(backend=crypto_default_backend(), public_exponent=65537, key_size=2048)\n subject = issuer = x509.Name(\n [\n x509.NameAttribute(NameOID.COUNTRY_NAME, 'NO'),\n x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, 'OSLO'),\n x509.NameAttribute(NameOID.LOCALITY_NAME, 'OSLO'),\n x509.NameAttribute(NameOID.ORGANIZATION_NAME, 'Intility AS'),\n x509.NameAttribute(NameOID.COMMON_NAME, 'intility.no'),\n ]\n )\n signing_cert = (\n x509.CertificateBuilder()\n .subject_name(subject)\n .issuer_name(issuer)\n .public_key(signing_key.public_key())\n .serial_number(x509.random_serial_number())\n .not_valid_before(datetime.utcnow())\n .not_valid_after(\n # Our certificate will be valid for 10 days\n datetime.utcnow()\n + timedelta(days=10)\n # Sign our certificate with our private key\n )\n .sign(signing_key, hashes.SHA256(), crypto_default_backend())\n .public_bytes(crypto_serialization.Encoding.DER)\n )\n return signing_key, signing_cert", "def cli_arguments():\n\n parser = argparse.ArgumentParser(\n formatter_class=argparse.RawDescriptionHelpFormatter,\n usage=f\"\\n{Color.DETAIL}pdforce.py [-p <pdf>] [-w <wordlist>] [-e <encoding>] [-o <output>] [-c] [-h/--help]{Color.END}\",\n description=f\"{Color.EMPHASIS}{TITLE}\\nLightweight PDF password cracker. USE FOR LEGAL INTENTS ONLY.{Color.END}\",\n epilog=f\"{Color.EMPHASIS}Made by @poponealex - https://github.com/poponealex{Color.END}\",\n )\n\n parser.add_argument(\n \"-p\",\n \"--pdf\",\n type=str,\n help=f\"{Color.INFORMATION}Path to the pdf file.{Color.END}\",\n action=\"store\",\n default=\"\",\n )\n\n parser.add_argument(\n \"-w\",\n \"--wordlist\",\n type=str,\n help=f\"{Color.INFORMATION}Path to the wordlist.{Color.END}\",\n action=\"store\",\n default=\"\",\n )\n\n parser.add_argument(\n \"-e\",\n \"--encoding\",\n type=str,\n help=f\"{Color.INFORMATION}Specify an encoding for the wordlist (https://docs.python.org/3/library/codecs.html#standard-encodings). The default encoding is platform dependent. Use 'iso8859_1' for rockyou. 
{Color.END}\",\n action=\"store\",\n default=None,\n )\n\n parser.add_argument(\n \"-o\",\n \"--output\",\n help=f\"{Color.INFORMATION}Output the cracked password to a new file.{Color.END}\",\n action=\"store\",\n )\n\n parser.add_argument(\n \"-c\",\n \"--copy\",\n help=f\"{Color.INFORMATION}Copy the password to the clipboard.{Color.END}\",\n action=\"store_true\",\n )\n\n return parser.parse_args()", "def check_valid_request_ca(self):\n\n self.check_valid_request_common()\n\n alg = self.get_POW().getSignatureAlgorithm()\n bc = self.get_POW().getBasicConstraints()\n eku = self.get_POW().getEKU()\n sia = self.get_POW().getSIA()\n\n if alg != rpki.oids.sha256WithRSAEncryption:\n raise rpki.exceptions.BadPKCS10(\"PKCS #10 has bad signature algorithm for CA: %s\" % alg)\n\n if bc is None or not bc[0] or bc[1] is not None:\n raise rpki.exceptions.BadPKCS10(\"PKCS #10 CA bad basicConstraints\")\n\n if eku is not None:\n raise rpki.exceptions.BadPKCS10(\"PKCS #10 CA EKU not allowed\")\n\n if sia is None:\n raise rpki.exceptions.BadPKCS10(\"PKCS #10 CA SIA missing\")\n\n caRepository, rpkiManifest, signedObject, rpkiNotify = sia\n\n logger.debug(\"check_valid_request_ca(): sia: %r\", sia)\n\n if signedObject:\n raise rpki.exceptions.BadPKCS10(\"PKCS #10 CA SIA must not have id-ad-signedObject\")\n\n if not caRepository:\n raise rpki.exceptions.BadPKCS10(\"PKCS #10 CA SIA must have id-ad-caRepository\")\n\n if not any(uri.startswith(\"rsync://\") for uri in caRepository):\n raise rpki.exceptions.BadPKCS10(\"PKCS #10 CA SIA id-ad-caRepository contains no rsync URIs\")\n\n if any(uri.startswith(\"rsync://\") and not uri.endswith(\"/\") for uri in caRepository):\n raise rpki.exceptions.BadPKCS10(\"PKCS #10 CA SIA id-ad-caRepository does not end with slash\")\n\n if not rpkiManifest:\n raise rpki.exceptions.BadPKCS10(\"PKCS #10 CA SIA must have id-ad-rpkiManifest\")\n\n if not any(uri.startswith(\"rsync://\") for uri in rpkiManifest):\n raise rpki.exceptions.BadPKCS10(\"PKCS #10 CA SIA id-ad-rpkiManifest contains no rsync URIs\")\n\n if any(uri.startswith(\"rsync://\") and uri.endswith(\"/\") for uri in rpkiManifest):\n raise rpki.exceptions.BadPKCS10(\"PKCS #10 CA SIA id-ad-rpkiManifest ends with slash\")\n\n if any(not uri.startswith(\"http://\") and not uri.startswith(\"https://\") for uri in rpkiNotify):\n raise rpki.exceptions.BadPKCS10(\"PKCS #10 CA SIA id-ad-rpkiNotify neither HTTP nor HTTPS\")", "def parse_args():\n parser = argparse.ArgumentParser()\n parser.add_argument('--coe', action='store_true', help='creating coe files')\n return parser.parse_args()" ]
[ "0.7071083", "0.6448927", "0.64444983", "0.63485956", "0.6237446", "0.6178711", "0.61732894", "0.6083192", "0.6013771", "0.59875655", "0.5984441", "0.5983292", "0.5934362", "0.5875236", "0.58677095", "0.58173174", "0.58064055", "0.5790398", "0.5781913", "0.5750335", "0.5740302", "0.5721597", "0.5605422", "0.55791026", "0.55675447", "0.5567282", "0.55523837", "0.5460517", "0.54327935", "0.53926224", "0.5361464", "0.53598654", "0.5357013", "0.53555435", "0.5354666", "0.5323494", "0.53209996", "0.5315809", "0.53068346", "0.5302941", "0.5300006", "0.5263966", "0.52369744", "0.52325", "0.52283466", "0.5208991", "0.5192991", "0.5190959", "0.5172212", "0.51478577", "0.51441616", "0.5140278", "0.5138811", "0.5137863", "0.51263607", "0.5116356", "0.51162", "0.51162", "0.5110071", "0.5109341", "0.51066273", "0.509405", "0.50858927", "0.50785536", "0.5076114", "0.50721186", "0.50695056", "0.5065388", "0.5063359", "0.50614715", "0.5039639", "0.50366575", "0.50112647", "0.50112003", "0.5007794", "0.5007794", "0.500499", "0.5004912", "0.49902904", "0.4985721", "0.49740255", "0.49609396", "0.49584654", "0.49522728", "0.4948192", "0.49475604", "0.49436724", "0.4942189", "0.49415696", "0.49393913", "0.49331236", "0.49310616", "0.49202797", "0.49192572", "0.49127594", "0.4903473", "0.4898079", "0.48933005", "0.48923823", "0.48917994" ]
0.6887356
1
Load commandline arguments, output cert.
def sign_command(args):
    if args.files:
        die("Unexpected positional arguments")

    # Load certificate request
    if not args.request:
        die("Need --request")
    subject_csr = load_req(args.request)

    reset_info = None
    if args.reset:
        reset_info = info_from_args(args)

    # Load CA info
    if not args.ca_info:
        die("Need --ca-info")
    if args.ca_info.endswith('.csr'):
        issuer_obj = load_req(args.ca_info)
    else:
        issuer_obj = load_cert(args.ca_info)

    # Load CA private key
    issuer_key = load_key(args.ca_key, load_password(args.password_file))
    if not same_pubkey(issuer_key, issuer_obj):
        die("--ca-private-key does not match --ca-info data")

    # Certificate generation
    cert = do_sign(subject_csr, issuer_obj, issuer_key, args.days, args.path_length, args.request, reset_info=reset_info)

    # Write certificate
    do_output(cert_to_pem(cert), args, 'x509')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def main(cli_args):\n store_obj = cert_human_py3.CertChainStore.from_socket(\n host=cli_args.host, port=cli_args.port\n )\n\n print(store_obj.dump_json)", "def main():\n ssl_date_fmt = r'%b %d %H:%M:%S %Y %Z'\n #cert_file_name = os.path.join(os.path.dirname(__file__), \"testcert.pem\")\n\n parser = argparse.ArgumentParser(description='Parse a certificate and show days left')\n parser.add_argument('-v', '--verbose', action='store_true', help='show full certificate')\n parser.add_argument('cert', nargs='+', help='certifcate file(s)')\n args = parser.parse_args()\n for cert_file_name in args.cert:\n try:\n cert_dict = ssl._ssl._test_decode_cert(cert_file_name)\n serial = cert_dict['serialNumber']\n subject = dict(x[0] for x in cert_dict['subject'])\n issued_to = subject['commonName']\n time_left = datetime.datetime.strptime(cert_dict['notAfter'], ssl_date_fmt) - datetime.datetime.utcnow()\n if args.verbose:\n pp(cert_dict)\n ssl_expires_in(issued_to, serial, time_left)\n\n except Exception as error:\n print(\"Error decoding certificate: {:}\".format(error))", "def main():\n try:\n return run_sysca(sys.argv[1:])\n except InvalidCertificate as ex:\n die(str(ex))", "def info_from_args(args):\n return CertInfo(\n subject=parse_dn(args.subject),\n usage=parse_list(args.usage),\n alt_names=parse_list(args.san),\n ocsp_nocheck=args.ocsp_nocheck,\n ocsp_must_staple=args.ocsp_must_staple,\n ocsp_must_staple_v2=args.ocsp_must_staple_v2,\n ocsp_urls=parse_list(args.ocsp_urls),\n crl_urls=parse_list(args.crl_urls),\n issuer_urls=parse_list(args.issuer_urls),\n permit_subtrees=parse_list(args.permit_subtrees),\n exclude_subtrees=parse_list(args.exclude_subtrees),\n ca=args.CA,\n path_length=args.path_length)", "def req_command(args):\n if args.files:\n die(\"Unexpected positional arguments\")\n\n subject_info = info_from_args(args)\n\n if subject_info.ca:\n msg('Request for CA cert')\n else:\n msg('Request for end-entity cert')\n subject_info.show(msg_show)\n\n # Load private key, create signing request\n key = load_key(args.key, load_password(args.password_file))\n req = create_x509_req(key, subject_info)\n do_output(req_to_pem(req), args, 'req')", "def _set_arguments(self):\n cert_location = f\"dependencies{sep}certificates{sep}localuser.crt\"\n key_location = f\"dependencies{sep}certificates{sep}localuser.key\"\n assert Path(cert_location).exists(), (\n f\"The certificate isn't \"\n f\"present at location {Path(cert_location).absolute()}\"\n )\n assert Path(key_location).exists(), (\n f\"The certificate key isn't \"\n f\"present at location {Path(key_location).absolute()}\"\n )\n self._arguments = [\n (\n \"test-certificate-verify\",\n [\"-k\", key_location, \"-c\", cert_location],\n ),\n (\n \"test-sig-algs\",\n [],\n ),\n (\n \"test-clienthello-md5\",\n [],\n ),\n (\n \"test-tls13-pkcs-signature\",\n [],\n ),\n ]", "def Run(self, args):\n holder = base_classes.ComputeApiHolder(self.ReleaseTrack())\n client = holder.client\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, holder.resources, default_scope=compute_scope.ScopeEnum.GLOBAL)\n\n certificate = files.ReadFileContents(args.certificate)\n private_key = files.ReadFileContents(args.private_key)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n request = client.messages.ComputeRegionSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n 
region=ssl_certificate_ref.region,\n project=ssl_certificate_ref.project)\n collection = client.apitools_client.regionSslCertificates\n else:\n request = client.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n project=ssl_certificate_ref.project)\n collection = client.apitools_client.sslCertificates\n\n return client.MakeRequests([(collection, 'Insert', request)])", "def _Args(parser,\n include_l7_internal_load_balancing=False,\n support_managed_certs=False):\n parser.add_argument(\n '--description',\n help='An optional, textual description for the SSL certificate.')\n\n parser.display_info.AddCacheUpdater(\n flags.SslCertificatesCompleterBeta\n if include_l7_internal_load_balancing else flags.SslCertificatesCompleter)\n\n if support_managed_certs:\n managed_or_not = parser.add_group(\n mutex=True,\n required=True,\n help='Flags for managed or self-managed certificate. ')\n\n managed_or_not.add_argument(\n '--domains',\n metavar='DOMAIN',\n type=arg_parsers.ArgList(min_length=1),\n default=[],\n help=\"\"\"\\\n List of domains to create a managed certificate for.\n \"\"\")\n\n not_managed = managed_or_not.add_group('Flags for self-managed certificate')\n not_managed.add_argument(\n '--certificate',\n metavar='LOCAL_FILE_PATH',\n required=True,\n help=\"\"\"\\\n Path to a local certificate file to create a self-managed\n certificate. The certificate must be in PEM format. The certificate\n chain must be no greater than 5 certs long. The chain must include at\n least one intermediate cert.\n \"\"\")\n not_managed.add_argument(\n '--private-key',\n metavar='LOCAL_FILE_PATH',\n required=True,\n help=\"\"\"\\\n Path to a local private key file. The private key must be in PEM\n format and must use RSA or ECDSA encryption.\n \"\"\")\n else:\n parser.add_argument(\n '--certificate',\n required=True,\n metavar='LOCAL_FILE_PATH',\n help=\"\"\"\\\n Path to a local certificate file. The certificate must be in PEM\n format. The certificate chain must be no greater than 5 certs long. The\n chain must include at least one intermediate cert.\n \"\"\")\n\n parser.add_argument(\n '--private-key',\n required=True,\n metavar='LOCAL_FILE_PATH',\n help=\"\"\"\\\n Path to a local private key file. 
The private key must be in PEM\n format and must use RSA or ECDSA encryption.\n \"\"\")", "def main():\n licensify(_parse_args())", "def load_cert_chain(self, certfile, keyfile: Optional[Any] = ...):\n ...", "def __check_opts(self):\n self.ca_cert_file = os.environ['HOME'] + '/.cat_installer/ca.pem'\n self.pfx_file = os.environ['HOME'] + '/.cat_installer/user.p12'\n if not os.path.isfile(self.ca_cert_file):\n print(Messages.cert_error)\n sys.exit(2)", "def _ParseCertificateArguments(client, args):\n self_managed = None\n managed = None\n certificate_type = None\n if args.certificate:\n certificate_type = \\\n client.messages.SslCertificate.TypeValueValuesEnum.SELF_MANAGED\n certificate = files.ReadFileContents(args.certificate)\n private_key = files.ReadFileContents(args.private_key)\n self_managed = client.messages.SslCertificateSelfManagedSslCertificate(\n certificate=certificate, privateKey=private_key)\n if args.domains:\n certificate_type = \\\n client.messages.SslCertificate.TypeValueValuesEnum.MANAGED\n managed = client.messages.SslCertificateManagedSslCertificate(\n domains=args.domains)\n return certificate_type, self_managed, managed", "def main(argv):\n\n valid = [\"proxy=\", \"myproxy=\", \"mail=\", \"send-mail=\", \"time=\", \"verbose\", \"help\"]\n ### // Default values\n proxy = os.getenv('X509_USER_PROXY')\n myproxy = False\n verbose = False\n mail = os.getenv('USER')\n sendMail = True\n time = 3\n\n try:\n opts, _args = getopt.getopt(argv, \"\", valid)\n except getopt.GetoptError as ex:\n print(\"Options: {}\\n\\nException: {}\".format(main.__doc__, str(ex)))\n sys.exit(1)\n\n ### // Handle arguments given in the command line\n for opt, arg in opts:\n if opt == \"--help\":\n print(main.__doc__)\n sys.exit(0)\n if opt == \"--proxy\":\n proxy = arg\n if proxy.startswith(\"~/\"):\n proxy = os.getenv('HOME') + proxy[1:]\n if not os.path.exists(proxy):\n print(\"Proxy File does not exist\")\n sys.exit(2)\n if opt == \"--mail\":\n mail = arg\n if opt == \"--myproxy\":\n myproxy = arg\n if opt == \"--send-mail\":\n sendMail = arg\n if opt == \"--time\":\n time = int(arg)\n if time < 1:\n print(\"Invalid time format. 
Check the options: {}\".format(main.__doc__))\n raise sys.exit(3)\n if opt == \"--verbose\":\n verbose = True\n\n command = [\"voms-proxy-info\", \"-file\", str(proxy)]\n p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n out, _err = p.communicate()\n proxyInfo = [line for line in out.split('\\n') if line]\n processTimeLeft(sendMail, verbose, proxyInfo, time, mail)\n\n if myproxy:\n os.environ[\"X509_USER_CERT\"] = proxy\n os.environ[\"X509_USER_KEY\"] = proxy\n command = [\"myproxy-info\", \"-v\", \"-l\", \"amaltaro\", \"-s\", \"myproxy.cern.ch\", \"-k\", \"amaltaroCERN\"]\n p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n out, err = p.communicate()\n proxyInfo = [line for line in out.split('\\n') if line]\n processTimeLeft(sendMail, verbose, proxyInfo, time, mail)", "def Run(self, args):\n holder = base_classes.ComputeApiHolder(self.ReleaseTrack())\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, holder.resources, default_scope=compute_scope.ScopeEnum.GLOBAL)\n return _Run(args, holder, ssl_certificate_ref)", "def show_command(args):\n for fn in args.files:\n ext = os.path.splitext(fn)[1].lower()\n if ext == '.csr':\n cmd = ['openssl', 'req', '-in', fn, '-text']\n elif ext == '.crt':\n cmd = ['openssl', 'x509', '-in', fn, '-text']\n else:\n die(\"Unsupported file: %s\", fn)\n subprocess.check_call(cmd)", "def __init__(__self__,\n resource_name: str,\n args: CertificateArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def Run(self, args):\n holder = base_classes.ComputeApiHolder(self.ReleaseTrack())\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, holder.resources, default_scope=compute_scope.ScopeEnum.GLOBAL)\n\n return _Run(args, holder, ssl_certificate_ref)", "def add_certificate_arguments(parser):\n group = parser.add_argument_group(\"Certificate management\")\n group.add_argument(\n \"-sn\", \"--serial_number\",\n help=\"Serial number for the certificate\",\n type=int,\n default=1\n )\n group.add_argument(\n \"-d\", \"--duration\",\n help=\"Period of validity for certificate (seconds)\",\n type=int,\n default=60*60*24*(365*100+25)\n )", "def main(argv):\n\n\n parser = argparse.ArgumentParser(description='convert der to raw')\n parser.add_argument('-s','--secretkey_file', help='Secret key', required=True)\n parser.add_argument('-p','--publickey_file', help='Public key', required=True)\n args = parser.parse_args()\n\n secretkey_file = args.secretkey_file\n publickey_file = args.publickey_file\n\n\n privkey = SigningKey.from_der(open(secretkey_file).read())\n pubkey = VerifyingKey.from_der(open(publickey_file).read())\n\n open(secretkey_file[0:-4] + \".bin\", \"wb\").write(privkey.to_string())\n open(publickey_file[0:-4] + \".bin\", \"wb\").write(pubkey.to_string())", "def __init__(__self__,\n resource_name: str,\n args: ServerCertificateArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def AddCertificateFlag(parser, required=False):\n help_text = \"\"\"\\\n x509 PEM-encoded certificate that will be used by the replica to\n authenticate against the database server.\n \"\"\"\n parser.add_argument('--certificate', help=help_text, required=required)", "def read_arguments(argv):\n\tif argv[0] in ('1', '2'):\n\t\tconos_config['endpoint'] = endpoint[argv[0]]\n\telse:\n\t\tusage()\n\n\tif argv[1] in ('dev', 'test', 'int', 'prod'):\n\t\tconos_config['environment'] = argv[1]\n\t\tconos_config['sts_url'] = eval(argv[1] + 
'_sts_url')\n\t\tconos_config['aicuu_url'] = eval(argv[1] + '_aicuu_url')\n\telse:\n\t\tusage()\n\n\tif len(argv) == 6:\n\t\tconos_config['number_threads'] = '1'\n\telse:\n\t\tif argv[6] in ('1', '2', '3', '4', '5', '6', '7', '8'):\n\t\t\tconos_config['number_threads'] = argv[6]\n\t\telse:\n\t\t\tusage()\n\n\tconos_config['client_id'] = argv[2]\n\tconos_config['client_secret'] = argv[3]\n\tconos_config['input_file'] = argv[4]\n\tconos_config['output_file'] = argv[5]", "def initial_setup():\n\n if os.path.exists(cfg.ca_private_key_path()):\n pkey = _try_load_ca_private_key(cfg.ca_private_key_path())\n else:\n pkey = _generate_ca_private_key(cfg.ca_private_key_path())\n\n if os.path.exists(cfg.ca_cert_path()):\n _try_load_ca_cert(cfg.ca_cert_path())\n else:\n _generate_ca_cert(cfg.ca_cert_path(), pkey)", "def main():\n\n parser = argparse.ArgumentParser()\n parser.add_argument('-H', '--host', required=True)\n parser.add_argument('-p', '--port', default=443)\n parser.add_argument('-u', '--url', default='/')\n parser.add_argument('-c', '--cert', required=True)\n parser.add_argument('-k', '--key', required=True)\n parser.add_argument('-P', '--perfdata', action='append')\n args = parser.parse_args()\n\n csv = get_csv(args)\n rows = get_rows(csv)\n\n frontend_errors = get_frontend_errors(rows)\n backend_errors = get_backend_errors(rows)\n member_errors = get_member_errors(rows)\n perfdata = get_perfdata(args.perfdata, rows)\n\n code = NAGIOS_OK\n if member_errors:\n code = NAGIOS_WARNING\n if frontend_errors or backend_errors:\n code = NAGIOS_CRITICAL\n\n status = ['OK', 'WARNING', 'CRITICAL'][code]\n print '{} frontend errors {}; backend errors {}; member errors {} | {}'.\\\n format(status,\n ', '.join(frontend_errors) if frontend_errors else 'none',\n ', '.join(backend_errors) if backend_errors else 'none',\n ', '.join(member_errors) if member_errors else 'none',\n ' '.join(perfdata))\n\n sys.exit(code)", "def __init__(__self__,\n resource_name: str,\n args: SSLCertificateArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def certificate_data(prog):\n retval = Prog.RetVal.ok\n prog.log.info3(\"+++ generating certificate data (hashes)...\")\n for target in prog.target_list:\n uniq = []\n for t in target.tlsa:\n if t.params() in uniq:\n continue\n uniq += [ t.params() ]\n\n prog.log.info3(\n \" ++ tlsa: {}{}{}, request: {}\".format(t.usage, t.selector,\n t.matching,\n target.domain))\n try:\n data = get_data(prog, target.domain, t)\n for d in data:\n prog.log.info3(\n \" + cert: {}\\n + data: {}\".format(d[0], d[1]))\n\n # The only time we _don't_ print this, is if we are\n # printing the log info to stdout and the debug level\n # is 'debug':\n if not (prog.log.type == logging.LogType.stdout\n and prog.log.level == logging.LogLevel.debug):\n print(\"{} {} {} {} {} {}\".format(\n get_domain(prog, d[0]),\n t.usage, t.selector, t.matching, d[1], d[0]))\n\n except (Except.FunctionError, Except.InternalError,\n Except.DNSProcessingError) as ex:\n prog.log.error(\"{}: {}\".format(target.domain, ex.message))\n retval = Prog.RetVal.exit_failure\n continue\n\n return retval", "def cli():\n config, auth, execute_now = read_command_line_arguments()\n main(config, auth, execute_now)", "def main():\n # This is used to store the certificate filename\n cert = \"\"\n\n # Setup a signal handler to catch control-c and clean up the cert temp file\n # No way to catch sigkill so try not to do that.\n # noinspection PyUnusedLocal\n def sigint_handler(sig, frame): # pylint:disable=unused-argument\n 
\"\"\"Handle interrupt signals.\"\"\"\n if not args.cert:\n try:\n os.unlink(cert)\n except OSError: # pylint:disable=pointless-except\n pass\n print \"Exiting...\"\n sys.exit(0)\n\n parser = ArgumentParser('Remote APIC API Inspector and GUI Log Server')\n\n parser.add_argument('-a', '--apicip', required=False, default='8.8.8.8',\n help='If you have a multihomed system, where the ' +\n 'apic is on a private network, the server will ' +\n 'print the ip address your local system has a ' +\n 'route to 8.8.8.8. If you want the server to ' +\n 'print a more accurate ip address for the ' +\n 'server you can tell it the apicip address.')\n\n parser.add_argument('-c', '--cert', type=str, required=False,\n help='The server certificate file for ssl ' +\n 'connections, default=\"server.pem\"')\n\n parser.add_argument('-d', '--delete_imdata', action='store_true',\n default=False, required=False,\n help='Strip the imdata from the response and payload')\n\n parser.add_argument('-e', '--exclude', action='append', nargs='*',\n default=[], choices=['subscriptionRefresh',\n 'aaaRefresh',\n 'aaaLogout',\n 'HDfabricOverallHealth5min-0',\n 'topInfo', 'all'],\n help='Exclude certain types of common noise queries.')\n\n parser.add_argument('-i', '--indent', type=int, default=2, required=False,\n help='The number of spaces to indent when pretty ' +\n 'printing')\n\n parser.add_argument('-l', '--location', default='/apiinspector',\n required=False,\n help='Location that transaction logs are being ' +\n 'sent to, default=/apiinspector')\n\n parser.add_argument('-n', '--nice-output', action='store_true',\n default=False, required=False,\n help='Pretty print the response and payload')\n\n parser.add_argument('-p', '--port', type=int, required=False, default=8987,\n help='Local port to listen on, default=8987')\n\n parser.add_argument('-s', '--sslport', type=int, required=False,\n default=8443,\n help='Local port to listen on for ssl connections, ' +\n 'default=8443')\n\n parser.add_argument('-r', '--requests-log', action='store_true',\n default=False, required=False,\n help='Log server requests and response codes to ' +\n 'standard error')\n\n parser.add_argument('-t', '--title', default='SimpleAciUiLogServer',\n required=False,\n help='Change the name shown for this application ' +\n 'when accessed with a GET request')\n\n parser.add_argument('-ty', '--type', action='append', nargs='*',\n default=['all'], choices=['POST', 'GET', 'undefined',\n 'EventChannelMessage'],\n help='Limit logs to specific request types.')\n\n args = parser.parse_args()\n\n logging.basicConfig(level=logging.DEBUG,\n format='%(asctime)s %(levelname)s - \\n%(message)s')\n if args.exclude:\n # Flatten the list\n args.exclude = [val for sublist in args.exclude for val in sublist]\n\n if not args.location.startswith(\"/\"):\n args.location = \"/\" + str(args.location)\n\n if args.type:\n # Flatten the list\n args.type = [val for sublist in args.type for val in sublist]\n\n ThreadingSimpleAciUiLogServer.prettyprint = args.nice_output\n ThreadingSimpleAciUiLogServer.indent = args.indent\n ThreadingSimpleAciUiLogServer.strip_imdata = args.delete_imdata\n\n # Instantiate a http server\n http_server = ThreadingSimpleAciUiLogServer((\"\", args.port),\n log_requests=args.requests_log,\n location=args.location,\n excludes=args.exclude,\n app_name=args.title)\n\n if not args.cert:\n # Workaround ssl wrap socket not taking a file like object\n cert_file = tempfile.NamedTemporaryFile(delete=False)\n cert_file.write(SERVER_CERT)\n cert_file.close()\n cert = 
cert_file.name\n        print(\"\\n+++WARNING+++ Using an embedded self-signed certificate for \" +\n              \"HTTPS, this is not secure.\\n\")\n    else:\n        cert = args.cert\n\n    # Instantiate a https server as well\n    https_server = ThreadingSimpleAciUiLogServer((\"\", args.sslport),\n                                                 cert=cert,\n                                                 location=args.location,\n                                                 log_requests=args.requests_log,\n                                                 excludes=args.exclude,\n                                                 app_name=args.title)\n\n    signal.signal(signal.SIGINT, sigint_handler)  # Or whatever signal\n\n    # Example of registering a function for a specific method. The function\n    # needs to exist of course. Note: undefined seems to be the same as a\n    # GET but the logging facility on the APIC seems to get in a state where\n    # instead of setting the method properly it sets it to undefined.\n    # These registered functions could then be used to take specific actions or\n    # be silent for specific methods.\n    # http_server.register_function(GET)\n    # http_server.register_function(POST)\n    # http_server.register_function(HEAD)\n    # http_server.register_function(DELETE)\n    # http_server.register_function(undefined)\n    # http_server.register_function(EventChannelMessage)\n\n    # This simply sets up a socket for UDP which has a small trick to it.\n    # It won't send any packets out that socket, but this will allow us to\n    # easily and quickly interrogate the socket to get the source IP address\n    # used to connect to this subnet which we can then print out to make for\n    # an easy copy/paste in the APIC UI.\n    ip_add = [(s.connect((args.apicip, 80)), s.getsockname()[0], s.close()) for\n              s in [socket.socket(socket.AF_INET, socket.SOCK_DGRAM)]][0][1]\n\n    print(\"Servers are running and reachable via:\\n\")\n    print(\"http://\" + str(ip_add) + \":\" + str(args.port) + args.location)\n    print(\"https://\" + str(ip_add) + \":\" + str(args.sslport) + args.location +\n          \"\\n\")\n    print(\"Make sure your APIC(s) are configured to send log messages: \" +\n          \"welcome username -> Start Remote Logging\")\n    print(\"Note: If you connect to your APIC via HTTPS, configure the \" +\n          \"remote logging to use the https server.\")\n    serve_forever([http_server, https_server])", "def get_ssl_certificate():", "def dcos_ca_bundle():\n    resp = sdk_cmd.cluster_request('GET', '/ca/dcos-ca.crt')\n    cert = resp.content.decode('ascii')\n    assert cert is not None\n    return cert", "def cli_arguments():\n\n    parser = argparse.ArgumentParser(\n        formatter_class=argparse.RawDescriptionHelpFormatter,\n        usage=f\"\\n{Color.DETAIL}pdforce.py [-p <pdf>] [-w <wordlist>] [-e <encoding>] [-o <output>] [-c] [-h/--help]{Color.END}\",\n        description=f\"{Color.EMPHASIS}{TITLE}\\nLightweight PDF password cracker. USE FOR LEGAL INTENTS ONLY.{Color.END}\",\n        epilog=f\"{Color.EMPHASIS}Made by @poponealex - https://github.com/poponealex{Color.END}\",\n    )\n\n    parser.add_argument(\n        \"-p\",\n        \"--pdf\",\n        type=str,\n        help=f\"{Color.INFORMATION}Path to the pdf file.{Color.END}\",\n        action=\"store\",\n        default=\"\",\n    )\n\n    parser.add_argument(\n        \"-w\",\n        \"--wordlist\",\n        type=str,\n        help=f\"{Color.INFORMATION}Path to the wordlist.{Color.END}\",\n        action=\"store\",\n        default=\"\",\n    )\n\n    parser.add_argument(\n        \"-e\",\n        \"--encoding\",\n        type=str,\n        help=f\"{Color.INFORMATION}Specify an encoding for the wordlist (https://docs.python.org/3/library/codecs.html#standard-encodings). The default encoding is platform dependent. Use 'iso8859_1' for rockyou. 
{Color.END}\",\n action=\"store\",\n default=None,\n )\n\n parser.add_argument(\n \"-o\",\n \"--output\",\n help=f\"{Color.INFORMATION}Output the cracked password to a new file.{Color.END}\",\n action=\"store\",\n )\n\n parser.add_argument(\n \"-c\",\n \"--copy\",\n help=f\"{Color.INFORMATION}Copy the password to the clipboard.{Color.END}\",\n action=\"store_true\",\n )\n\n return parser.parse_args()", "def get_ssl_certificate() :", "def parse_command_line(args):\n try:\n opts, args = getopt.getopt(args[1:],\"ha:t:R:C:K:\",[\"address=\",\"transport=\", \"rootcert=\", \"clientcert=\", \"key=\"])\n except getopt.GetoptError as err:\n print str(err)\n logger.info(get_usage())\n sys.exit(2)\n\n\n \"\"\"\n * options:\n * -a, --address <network element address or FQDN>\n * -t, --transport <transport type> default is tls\n * -C, --clientcert <client certificate file>\n * -K, --clientkey <client private key file>\n * -R, --rootcert <root certificates file>\n \"\"\"\n for option, arg in opts:\n if option == '-h':\n logger.info(get_usage())\n sys.exit()\n elif option in (\"-a\", \"--address\"):\n global switchIP\n switchIP = arg\n elif option in (\"-t\", \"--transport\"):\n global transport\n transport = arg\n elif option in (\"-R\", \"--rootcert\"):\n global root_cert_path\n root_cert_path = arg\n elif option in (\"-C\", \"--clientcert\"):\n global client_cert_path\n client_cert_path = arg\n elif option in (\"-K\", \"--key\"):\n global client_key_path\n client_key_path = arg\n global username\n username = raw_input('Enter Username : ')\n global password\n password = getpass.getpass('Enter Password : ')\n\n if(switchIP==None):\n logger.error(get_usage())\n return False\n\n return True", "def cert(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cert\")", "def cert(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cert\")", "def __init__(__self__,\n resource_name: str,\n args: OriginCaCertificateArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def main():\n parser = argparse.ArgumentParser()\n parser.add_argument(\n \"--download_path\",\n default=None,\n help=\"Free or auth\"\n )\n parser.add_argument(\n \"--download_type\", default=\"free\", help=\"Free or auth\"\n )\n parser.add_argument(\n \"--ipversion\",\n default=\"ipv4\", help=\"IP Version format \"\n )\n parser.add_argument(\n \"--format\",\n default=\"csv\",\n help=\"DB AVAILABLE FORMATS CSV or BIN\"\n )\n parser.add_argument(\"--product\", default=\"db1\", help=\"PRODUCT\")\n parser.add_argument(\n \"--token\",\n help=\"token used in order to authenticate\"\n \"in case of downloading the auth required DBs\"\n )\n parser.add_argument(\"--unzip\", default=True, help=\"\")\n parser.add_argument(\"--numbertoipv4\", default=True, help=\"\")\n\n args = parser.parse_args()\n\n cli_util = CliUtil(\n **{x: y for x, y in args._get_kwargs()}\n )\n cli_util.exec()\n return 0", "def __init__(self, proxy_only = False):\n self.key_file = None\n self.cert_file = None\n self.ca_path = None\n self.key_pass = None\n\n path = os.getenv(\"X509_CERT_DIR\", None)\n if path and os.path.exists(path):\n self.ca_path = path\n\n if not self.ca_path:\n path = \"/etc/grid-security/certificates\"\n if os.path.exists(path):\n self.ca_path = path\n\n path = os.getenv(\"X509_USER_PROXY\", None)\n if path and os.path.exists(path):\n self.key_file = self.cert_file = path\n\n if not self.key_file:\n path = os.getenv(\"X509_USER_KEY\", None)\n if path and os.path.exists(path):\n self.key_file = path\n\n if not self.cert_file:\n 
path = os.getenv(\"X509_USER_CERT\", None)\n if path and os.path.exists(path):\n self.cert_file = path\n\n if not self.key_file:\n path = os.getenv(\"HOME\") + \"/.globus/userkey.pem\"\n if os.path.exists(path):\n self.key_file = path\n\n if not self.cert_file:\n path = os.getenv(\"HOME\") + \"/.globus/usercert.pem\"\n if os.path.exists(path):\n self.cert_file = path\n\n if not self.ca_path or not os.path.exists(self.ca_path):\n raise RuntimeError(\"no certificate directory found\")\n\n if not self.key_file or not os.path.exists(self.key_file):\n raise RuntimeError(\"no certificate private key file found\")\n\n if not self.cert_file or not os.path.exists(self.cert_file):\n raise RuntimeError(\"no certificate public key file found\")\n\n if not proxy_only and self.key_file != self.cert_file:\n self.key_pass = getpass(\"Password for %s: \" % self.key_file)", "def cmd_entry():\n import argparse\n\n parser = argparse.ArgumentParser(\n description=\"Web based frontend to the health record system databaser\"\n )\n parser.add_argument('-c', '--config', required=True, help=\"Config file to load\")\n args = parser.parse_args()\n\n main(args.config)", "def _Run(args, holder, ssl_certificate_ref):\n client = holder.client\n\n certificate_type, self_managed, managed = _ParseCertificateArguments(\n client, args)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n request = client.messages.ComputeRegionSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n type=certificate_type,\n name=ssl_certificate_ref.Name(),\n selfManaged=self_managed,\n managed=managed,\n description=args.description),\n region=ssl_certificate_ref.region,\n project=ssl_certificate_ref.project)\n else:\n request = client.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n type=certificate_type,\n name=ssl_certificate_ref.Name(),\n selfManaged=self_managed,\n managed=managed,\n description=args.description),\n project=ssl_certificate_ref.project)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n collection = client.apitools_client.regionSslCertificates\n else:\n collection = client.apitools_client.sslCertificates\n\n return client.MakeRequests([(collection, 'Insert', request)])", "def cmd(\n ctx,\n url,\n prompt,\n include_paths,\n include_urls,\n include_only_ca,\n export_file,\n update_env,\n):\n client = ctx.obj.create_client(url=url, key=None, secret=None)\n export_file = pathify_export_file(client=client, export_file=export_file)\n\n with ctx.obj.exc_wrap(wraperror=ctx.obj.wraperror):\n includes = []\n\n for url in include_urls:\n includes += from_url(url=url, split=False, ca_only=include_only_ca)\n\n for path in include_paths:\n includes += from_path(path=path, split=False, ca_only=include_only_ca)\n\n chain = client.HTTP.get_cert_chain()\n leaf_cert, intm_certs = split_leaf(chain=chain)\n prompt = confirm_cert(prompt=prompt, cert=leaf_cert)\n handle_export(\n data=chain + includes,\n export_file=export_file,\n export_backup=True,\n export_format=\"pem\",\n )\n handle_update_env(update_env=update_env, export_file=export_file)\n\n ctx.exit(0)", "def main():\n\n args = parse_args()\n _crypt = crypt_data(args.key)\n\n if args.file and os.path.exists(args.file):\n _file = open(args.file, 'r').read()\n with open(args.file, 'w') as crypt_file:\n if args.encrypt:\n crypt_file.write(_crypt.encrypt(_file).decode())\n if args.decrypt:\n try:\n crypt_file.write(_crypt.decrypt(_file).decode())\n except 
binascii.Error:\n                print(\"This does not contain base64 data.\")\n\n    if args.string:\n        if args.encrypt:\n            print(_crypt.encrypt(args.string).decode())\n        try:\n            if args.decrypt:\n                print(_crypt.decrypt(args.string).decode())\n        except binascii.Error:\n            print(\"This string is not a base64 string.\")", "def main(args):\n    if args.pem is not None:\n        if args.session:\n            print(\"Simulating inventory collection with session-based API access...\")\n            spc1 = rest.Space(args.space_URL,\n                              cert=(args.pem, args.key),\n                              use_session=True,\n                              profile_file='/tmp/api-responses-session.csv')\n            collect_inv(spc1, args.threads)\n            spc1.logout()\n        else:\n            print(\"Simulating inventory collection with non-session-based API access...\")\n            spc2 = rest.Space(args.space_URL,\n                              cert=(args.pem, args.key),\n                              use_session=False,\n                              profile_file='/tmp/api-responses.csv')\n            collect_inv(spc2, args.threads)\n    else:\n        if args.session:\n            print(\"Simulating inventory collection with session-based API access...\")\n            spc1 = rest.Space(args.space_URL,\n                              args.user, args.passwd,\n                              use_session=True,\n                              profile_file='/tmp/api-responses-session.csv')\n            collect_inv(spc1, args.threads)\n            spc1.logout()\n        else:\n            print(\"Simulating inventory collection with non-session-based API access...\")\n            spc2 = rest.Space(args.space_URL,\n                              args.user, args.passwd,\n                              use_session=False,\n                              profile_file='/tmp/api-responses.csv')\n            collect_inv(spc2, args.threads)", "def cli(args):  # noqa; pylint: disable=unused-argument", "def main(args):\n    cli = CLI()\n    # Check arguments\n    cli.parse_arguments(args)", "def initialise(self, args, environ):", "def req_handler(args):\n    key = _get_key(args)\n    subject = get_subject_arguments()\n    req = create_certificate_request(key, subject=subject, file_name=args.req_out)\n    if not args.req_out:\n        print(print_certificate_request(req))\n    return req", "def main(args):", "def main(args):", "def selfsign_command(args):\n    if args.files:\n        die(\"Unexpected positional arguments\")\n\n    subject_info = info_from_args(args)\n\n    if subject_info.ca:\n        msg('Request for CA cert')\n    else:\n        msg('Request for end-entity cert')\n    subject_info.show(msg_show)\n\n    # Load private key, create signing request\n    key = load_key(args.key, load_password(args.password_file))\n    subject_csr = create_x509_req(key, subject_info)\n\n    # sign created request\n    cert = do_sign(subject_csr, subject_csr, key, args.days, args.path_length, '<selfsign>')\n    do_output(cert_to_pem(cert), args, 'x509')", "def configureSSL(domainName,dry=False):\n    \n    #enable ssl mod\n    execute(subprocess.call,[\"a2enmod\",\"ssl\"],dry=dry)\n    restartApache(dry=dry)\n    \n    #create input string for openssl command\n    inputStr='CA\\nNova Scotia\\nHalifax\\nCompute Canada\\nACENET\\n'+domainName+'\\[email protected]\\n'\n    \n    #create ssl cert\n    #Note that dry is fixed to be False, creating the cert doesn't really cause a problem except \n    #it might overwrite an existing cert, and if it isn't actually executed the following steps will not be able to execute\n    p=execute(subprocess.Popen,[\"openssl\",\"req\",\"-x509\",\"-nodes\"\n        ,\"-days\",\"3650\"\n        ,\"-newkey\",\"rsa:2048\"\n        ,\"-keyout\",\"/etc/ssl/private/server.key\"\n        ,\"-out\",\"/etc/ssl/certs/server.crt\"]\n        ,stdout=subprocess.PIPE,stdin=subprocess.PIPE,stderr=subprocess.STDOUT,dry=dry)\n    \n    #have to handle dry runs in a special way as this command (dry or not) \n    #depends on p not being None\n    if not dry:\n        output=execute(p.communicate,input=inputStr.encode('utf-8'),dry=dry)[0]\n    else:\n        print(\"p.communicate(input=\"+inputStr+\")\")\n    \n    #Set correct 
ownership and permission of key\n execute(subprocess.call,[\"sudo\",\"chown\",\"root:ssl-cert\",\"/etc/ssl/private/server.key\"],dry=dry)\n execute(subprocess.call,[\"sudo\",\"chmod\",\"640\",\"/etc/ssl/private/server.key\"],dry=dry)\n \n #comment out any previous settings\n execute(commentOutLineMatching,\".*SSLCertificateFile.*\",\"/etc/apache2/sites-available/default-ssl.conf\",dry=dry)#not matching\n execute(commentOutLineMatching,\".*SSLCertificateKeyFile.*\",\"/etc/apache2/sites-available/default-ssl.conf\",dry=dry)#not matching\n execute(commentOutLineMatching,\".*SSLCertificateChainFile.*\",\"/etc/apache2/sites-available/default-ssl.conf\",dry=dry)#not matching\n \n #add settings before for improved security </VirtualHost>\n execute(replaceStrInFileRe,\"</VirtualHost>\"\n ,\"\\tSSLCertificateFile /etc/ssl/certs/server.crt\\n\"\n +\"\\t\\tSSLCertificateKeyFile /etc/ssl/private/server.key\\n\"\n +\"\\t\\tSSLCertificateChainFile /etc/ssl/certs/server.crt\\n\"\n +\"\\t\\tServerName \"+domainName+\"\\n\"\n +\"\\t\\tServerAlias www.\"+domainName+\"\\n\"\n +\"\\t\\tSSLProtocol all -SSLv2 -SSLv3\\n\"\n +\"\\t\\tSSLCipherSuite HIGH:MEDIUM:!aNULL:!MD5:!SEED:!IDEA:!RC4\\n\"\n +\"\\t\\tSSLHonorCipherOrder on\\n\"\n +\"\\t</VirtualHost>\",\"/etc/apache2/sites-available/default-ssl.conf\",dry=dry)\n \n #add redirect to https\n execute(replaceStrInFileRe,\"</VirtualHost>\"\n ,\"\\tRedirect permanent / https://\"+domainName+\"/\\n</VirtualHost>\\n\"\n ,\"/etc/apache2/sites-available/000-default.conf\",dry=dry)\n \n #enable ssl on our virtual host\n execute(subprocess.call,[\"a2ensite\",\"default-ssl.conf\"])\n execute(subprocess.call,[\"service\",\"apache2\",\"restart\"])", "def main(args):\n\n if args['verbose']:\n logging.basicConfig(level=logging.DEBUG)\n else:\n if args['quiet']:\n logging.basicConfig(level=logging.ERROR)\n else:\n logging.basicConfig(level=logging.WARNING)\n\n # unpack args\n\n json_file = args['JSONfile']\n data_dir = args['data_directory']\n temp_file = args['tmp']\n release = args['release']\n\n if json_file:\n json_data = get_json_data(json_file)\n else:\n logging.log(logging.DEBUG, \"Preparing to download JSONfile\")\n if os.path.isfile(temp_file):\n logging.log(logging.WARNING, \"Removing file %s\" % temp_file)\n os.remove(temp_file)\n logging.log(logging.DEBUG, \"Issuing wget for JSON file\")\n args = ['wget', 'https://security-tracker.debian.org/tracker/data/json',\n '-O', temp_file]\n if os.path.isdir('/etc/ssl'):\n if os.path.isdir('/etc/ssl/ca-debian'):\n args.insert(1, '--ca-directory=/etc/ssl/ca-debian')\n call(args)\n logging.log(logging.DEBUG, \"File %s received\" % temp_file)\n json_data = get_json_data(temp_file)\n if os.path.isfile(temp_file):\n logging.log(logging.DEBUG, \"Removing file %s\" % temp_file)\n os.remove(temp_file)\n\n parseJSON(json_data, release)\n parsedirs(data_dir, re.compile('^dsa.+\\.data$'), 2, release)\n parsedirs(data_dir, re.compile('^dla.+\\.data$'), 2, release)\n logging.log(logging.INFO, \"Finished parsing JSON data\")\n printdsas(ovals)", "def command_line_start(argv, program_name):\n cl_parser = argparse.ArgumentParser(description='Tinkerforge Data Logger')\n\n cl_parser.add_argument('config_file', help=\"Path to the configuration file\")\n cl_parser.add_argument('-v', action=\"store_true\", dest=\"validate\",\n help=\"Just process the validation of the configuration file\")\n\n results = cl_parser.parse_args(argv)\n\n arguments_map = {}\n arguments_map[CONSOLE_CONFIG_FILE] = results.config_file\n 
arguments_map[CONSOLE_VALIDATE_ONLY] = results.validate\n\n return arguments_map", "def load(args):\n subprocess.check_call([\"/bin/launchctl\", \"load\"] + values.get(args))", "def setup_request_commandline() -> Request:\r\n parser = argparse.ArgumentParser()\r\n parser.add_argument(\"key\", help=\"The key to use when encrypting or \"\r\n \"decrypting. This needs to be of \"\r\n \"length 8, 16 or 24\")\r\n parser.add_argument(\"-s\", \"--string\", help=\"The string that needs to be \"\r\n \"encrypted or decrypted\")\r\n parser.add_argument(\"-f\", \"--file\", help=\"The text file that needs to be\"\r\n \"encrypted or decrypted\")\r\n parser.add_argument(\"-o\", \"--output\", default=\"print\",\r\n help=\"The output of the program. This is 'print' by \"\r\n \"default, but can be set to a file name as well.\")\r\n parser.add_argument(\"-m\", \"--mode\", default=\"en\",\r\n help=\"The mode to run the program in. If 'en' (default)\"\r\n \" then the program will encrypt, 'de' will cause \"\r\n \"the program to decrypt\")\r\n try:\r\n args = parser.parse_args()\r\n request = Request()\r\n request.encryption_state = CryptoMode(args.mode)\r\n request.data_input = args.string\r\n request.input_file = args.file\r\n request.output = args.output\r\n request.key = args.key\r\n print(request)\r\n return request\r\n except Exception as e:\r\n print(f\"Error! Could not read arguments.\\n{e}\")\r\n quit()", "def generate(name, domain, country, state, locale, email,\n keytype, keylength):\n if not domain:\n logger.error(\n \"ctl:info:generate\", \"Choose a fully-qualified domain name of the \"\n \"certificate. Must match a domain present on the system\"\n )\n domain = click.prompt(\"Domain name\")\n if not country:\n logger.info(\n \"ctl:cert:generate\",\n \"Two-character country code (ex.: 'US' or 'CA')\"\n )\n country = click.prompt(\"Country code\")\n if not state:\n state = click.prompt(\"State/Province\")\n if not locale:\n locale = click.prompt(\"City/Town/Locale\")\n if not email:\n email = click.prompt(\"Contact email [optional]\")\n try:\n cmd = client().certificates.generate\n job, data = cmd(\n name, domain, country, state, locale, email, keytype, keylength)\n handle_job(job)\n except Exception as e:\n raise CLIException(str(e))", "def main():\n args = parse_args()\n process_args(args)", "def do_genconfig(args):\n\n print(\"========= DEFAULT ========\")\n debug = utils.get_input(\n \"Enable agent in debug mode [y/N]: \") or 'n'\n retry_interval = utils.get_input(\n \"Type the polling interval in seconds for daemon to manage the nodes: \")\n batch_publishing_interval = utils.get_input(\n \"Type the publishing interval in seconds for daemon to push the metrics: \")\n refresh_interval = utils.get_input(\n \"Type the polling interval in seconds to get health status directly from OneView: \")\n scmb_certificate_dir = utils.get_input(\n \"Type the certificates directory to register in OneView SCMB [/var/run/oneview-monasca]: \")\n auth_retry_limit = utils.get_input(\n \"Type the maximum number of attempts to try authenticate in REST API: \")\n\n debug = 'false' if debug == 'n' else 'true'\n retry_interval = retry_interval if retry_interval else \"300\"\n refresh_interval = refresh_interval if refresh_interval else \"180\"\n batch_publishing_interval = batch_publishing_interval if batch_publishing_interval else \"60\"\n\n auth_retry_limit = auth_retry_limit if auth_retry_limit else \"5\"\n scmb_certificate_dir = scmb_certificate_dir if scmb_certificate_dir else \"/var/run/oneview-monasca\"\n\n 
scmb_certificate_dir = os.path.realpath(os.path.expanduser(scmb_certificate_dir))\n utils.makedirs(scmb_certificate_dir)\n\n print(\"========= Openstack =========\")\n auth_url = utils.get_input(\"Type the Keystone url for authentication: \")\n auth_user = utils.get_input(\"Type the name of your OpenStack user: \")\n auth_password = getpass.getpass(\"Type the password for your OpenStack user: \")\n auth_tenant_name = utils.get_input(\"Type the tenant name that the OpenStack user will be authenticated: \")\n monasca_api_version = utils.get_input(\"Type a version of Monasca API that you want to use [2_0]: \")\n\n monasca_api_version = monasca_api_version if monasca_api_version else \"2_0\"\n\n print(\"========= OneView =========\")\n oneview_manager_url = utils.get_input(\"Type the manager_url for the OneView services: \")\n oneview_username = utils.get_input(\"Type your OneView username: \")\n oneview_password = getpass.getpass(\"Type your OneView user's password: \")\n oneview_insecure = utils.get_input(\"Would you like to allow insecure connections to OneView? [Y/n]: \") or \"Y\"\n max_polling_attempts = utils.get_input(\"Max polling attempts OneView requests: \")\n tls_cacert_file = utils.get_input(\"Path to your CA OneView certificate file, if any: \")\n\n oneview_host = utils.extract_domain_from_service_url(oneview_manager_url)\n oneview_insecure = \"true\" if oneview_insecure.lower() == 'y' else \"false\"\n max_polling_attempts = max_polling_attempts if max_polling_attempts else \"15\"\n\n fault_tolerance_enable = False\n group_name = coordinator_url = None\n while True:\n create = utils.get_input(\"Would you like to enable fault tolerance in the agent? [Y/n] \") or 'y'\n\n if create.lower() == 'y':\n print(\"========= Tooz =========\")\n\n group_name = utils.get_input(\"The group name for tooz configuration: \")\n coordinator_url = utils.get_input(\"The coordinator url for tooz configuration: \")\n fault_tolerance_enable = True\n break\n elif create.lower() == 'n':\n break\n else:\n print(\"Invalid option.\\n\")\n\n config_drivers = {}\n try:\n names = utils.list_names_driver(const.NAMESPACE_DISCOVERY_NODES, log=False)\n except Exception as ex:\n print('\\nCannot load installed drivers - Error caused by %s\\n' % str(ex))\n names = []\n\n for name in names:\n try:\n conf = utils.load_class_by_alias(\n const.NAMESPACE_DISCOVERY_NODES, name, log=False).genconfig()\n\n config_drivers[name.split('_')[-1]] = conf\n except Exception as ex:\n print('\\nCannot generating config file session to driver: %s - Error caused by %s\\n' % (name, str(ex)))\n\n # Write Configuration file #\n config = ConfigParser()\n config.set(\"DEFAULT\", \"debug\", debug)\n config.set(\"DEFAULT\", \"retry_interval\", retry_interval)\n config.set(\"DEFAULT\", \"periodic_refresh_interval\", refresh_interval)\n config.set(\"DEFAULT\", \"batch_publishing_interval\", batch_publishing_interval)\n\n config.set(\"DEFAULT\", \"auth_retry_limit\", auth_retry_limit)\n config.set(\"DEFAULT\", \"scmb_certificate_dir\", scmb_certificate_dir)\n\n if fault_tolerance_enable:\n config.add_section(\"tooz\")\n config.set(\"tooz\", \"group_name\", group_name)\n config.set(\"tooz\", \"coordinator_url\", coordinator_url)\n\n config.add_section(\"openstack\")\n config.set(\"openstack\", \"auth_url\", auth_url)\n config.set(\"openstack\", \"auth_user\", auth_user)\n config.set(\"openstack\", \"auth_password\", auth_password)\n config.set(\"openstack\", \"auth_tenant_name\", auth_tenant_name)\n config.set(\"openstack\", 
\"monasca_api_version\", monasca_api_version)\n\n config.add_section(\"oneview\")\n config.set(\"oneview\", \"host\", oneview_host)\n config.set(\"oneview\", \"manager_url\", oneview_manager_url)\n config.set(\"oneview\", \"username\", oneview_username)\n config.set(\"oneview\", \"password\", oneview_password)\n config.set(\"oneview\", \"allow_insecure_connections\", oneview_insecure)\n config.set(\"oneview\", \"max_polling_attempts\", max_polling_attempts)\n config.set(\"oneview\", \"tls_cacert_file\", tls_cacert_file)\n\n for driver in config_drivers:\n config.add_section(driver)\n for option, value in config_drivers[driver].items():\n config.set(driver, option, value)\n\n if not args.config_file:\n args.config_file = '~' + os.path.sep + 'oneview_monasca.conf'\n\n filename = utils.get_input(\n \"Type the path of the new configuration file [%s]: \" % args.config_file) or args.config_file\n full_filename = os.path.realpath(os.path.expanduser(filename))\n\n config_dir = os.path.dirname(full_filename)\n utils.makedirs(config_dir)\n\n with open(full_filename, 'w') as configfile:\n config.write(configfile)\n print(\"======\\nFile created successfully on '%s'!\\n======\" % filename)", "def commandline_options(args):\n # -------------------------------------------------------------------------------\n parser = argparse.ArgumentParser(\n description=\"Query and parse the caseroot files to gather metadata information\"\n \" that can be posted to the CESM experiments database.\"\n \" \"\n \" CMIP6 experiment case names must be reserved already in the\"\n \" experiment database. Please see:\"\n \" https://csesgweb.cgd.ucar.edu/expdb2.0 for details.\"\n )\n\n CIME.utils.setup_standard_logging_options(parser)\n\n parser.add_argument(\n \"--user\",\n dest=\"user\",\n type=str,\n default=None,\n required=True,\n help=\"User name for SVN CESM developer access (required)\",\n )\n\n parser.add_argument(\n \"--password\",\n dest=\"password\",\n action=PasswordPromptAction,\n default=\"\",\n required=True,\n help=\"Password for SVN CESM developer access (required)\",\n )\n\n parser.add_argument(\n \"--caseroot\",\n nargs=1,\n required=False,\n help=\"Fully quailfied path to case root directory (optional). \"\n \"Defaults to current working directory.\",\n )\n\n parser.add_argument(\n \"--workdir\",\n nargs=1,\n required=False,\n help=\"Fully quailfied path to directory for storing intermediate \"\n \"case files. A sub-directory called \"\n \"archive_temp_dir is created, populated \"\n \"with case files, and posted to the CESM experiments database and \"\n 'SVN repository at URL \"{0}\". '\n \"This argument can be used to archive a caseroot when the user \"\n \"does not have write permission in the caseroot (optional). \"\n \"Defaults to current working directory.\".format(_svn_expdb_url),\n )\n\n parser.add_argument(\n \"--expType\",\n dest=\"expType\",\n nargs=1,\n required=True,\n choices=_exp_types,\n help=\"Experiment type. For CMIP6 experiments, the case must already \"\n \"exist in the experiments database at URL \"\n ' \"http://csegweb.cgd.ucar.edu/expdb2.0\" (required). '\n 'Must be one of \"{0}\"'.format(_exp_types),\n )\n\n parser.add_argument(\n \"--title\",\n nargs=1,\n required=False,\n default=None,\n help=\"Title of experiment (optional).\",\n )\n\n parser.add_argument(\n \"--ignore-logs\",\n dest=\"ignore_logs\",\n action=\"store_true\",\n help=\"Ignore updating the SVN repository with the caseroot/logs files. 
\"\n \"The experiments database will be updated (optional).\",\n )\n\n parser.add_argument(\n \"--ignore-timing\",\n dest=\"ignore_timing\",\n action=\"store_true\",\n help=\"Ignore updating the the SVN repository with caseroot/timing files.\"\n \"The experiments database will be updated (optional).\",\n )\n\n parser.add_argument(\n \"--ignore-repo-update\",\n dest=\"ignore_repo_update\",\n action=\"store_true\",\n help=\"Ignore updating the SVN repository with all the caseroot files. \"\n \"The experiments database will be updated (optional).\",\n )\n\n parser.add_argument(\n \"--add-files\",\n dest=\"user_add_files\",\n required=False,\n help=\"Comma-separated list with no spaces of files or directories to be \"\n \"added to the SVN repository. These are in addition to the default added \"\n \"caseroot files and directories: \"\n '\"{0}, *.xml, user_nl_*\" (optional).'.format(_archive_list),\n )\n\n parser.add_argument(\n \"--dryrun\",\n action=\"store_true\",\n help=\"Parse settings and print what actions will be taken but \"\n \"do not execute the action (optional).\",\n )\n\n parser.add_argument(\n \"--query_cmip6\",\n nargs=2,\n required=False,\n help=\"Query the experiments database global attributes \"\n \"for specified CMIP6 casename as argument 1. \"\n \"Writes a json formatted output file, specified by argument 2, \"\n \"to subdir archive_files (optional).\",\n )\n\n parser.add_argument(\n \"--test-post\",\n dest=\"test_post\",\n action=\"store_true\",\n help=\"Post metadata to the test expdb2.0 web application server \"\n 'at URL \"http://csegwebdev.cgd.ucar.edu/expdb2.0\". '\n \"No --test-post argument defaults to posting metadata to the \"\n \"production expdb2.0 web application server \"\n 'at URL \"http://csegweb.cgd.ucar.edu/expdb2.0\" (optional).',\n )\n\n opts = CIME.utils.parse_args_and_handle_standard_logging_options(args, parser)\n\n return opts", "def parse_args(argv):\n\n parser = argparse.ArgumentParser(description='Fetch the requested report from App Nexus and save it to file system.')\n\n parser.add_argument('report_request', help='Path to JSON file that contains the report request.')\n parser.add_argument('-c', '--config', help='Path to JSON file that contains the keys \"api_endpoint\", \"user\" and \"pass\". If this parameter is not given, env vars APPNEXUS_API_ENDPOINT, APPNEXUS_USER and APPNEXUS_PASS must be set.')\n parser.add_argument('-d', '--save_dir', default='', help='The directory to save the report CSV. 
Default is current directory.')\n parser.add_argument('-I', '--no-interaction', action='store_true', help='Whether to ask confirmation before fetching report.')\n parser.add_argument('-s', '--start', help='Value for \"start_date\" parameter of report request.')\n parser.add_argument('-e', '--end', help='Value for \"end_date\" parameter of report request.')\n parser.add_argument('-i', '--interval', help='Value for \"report_interval\" parameter of report request.')\n parser.add_argument('--debug', action='store_true', help='Whether to print extra debug information or not.')\n\n args = parser.parse_args(argv[1:])\n\n\n if args.config:\n args.config = json.load(open(args.config, 'r'))\n\n elif (\n os.environ.get('APPNEXUS_API_ENDPOINT') and\n os.environ.get('APPNEXUS_USER') and\n os.environ.get('APPNEXUS_PASS')\n ):\n args.config = {\n 'api_endpoint': os.environ['APPNEXUS_API_ENDPOINT'],\n 'user' : os.environ['APPNEXUS_USER'],\n 'pass' : os.environ['APPNEXUS_PASS']\n }\n\n else:\n print \"\"\"\n You must either provide a --config parameter or\n set the env vars APPNEXUS_API_ENDPOINT, APPNEXUS_USER and APPNEXUS_PASS!\n Call this script with the --help option for more information.\n \"\"\"\n\n sys.exit(1)\n\n\n return args", "def setup_request_commandline() -> Request:\n parser = argparse.ArgumentParser()\n parser.add_argument(\"key\", help=\"The key to use when encrypting or \"\n \"decrypting. This needs to be of \"\n \"length 8, 16 or 24\")\n parser.add_argument(\"-s\", \"--string\", help=\"The string that needs to be \"\n \"encrypted or decrypted\")\n parser.add_argument(\"-f\", \"--file\", help=\"The text file that needs to be\"\n \"encrypted or decrypted\")\n parser.add_argument(\"-o\", \"--output\", default=\"print\",\n help=\"The output of the program. This is 'print' by \"\n \"default, but can be set to a file name as well.\")\n parser.add_argument(\"-m\", \"--mode\", default=\"en\",\n help=\"The mode to run the program in. If 'en' (default)\"\n \" then the program will encrypt, 'de' will cause \"\n \"the program to decrypt\")\n try:\n args = parser.parse_args()\n request = Request()\n request.encryption_state = CryptoMode(args.mode)\n request.data_input = args.string\n request.input_file = args.file\n request.output = args.output\n request.key = args.key\n print(request)\n return request\n except Exception as e:\n print(f\"Error! 
Could not read arguments.\\n{e}\")\n quit()", "def main() -> None:\n parser = argparse.ArgumentParser()\n parser.add_argument('-c', '--configfile',\n default=os.path.join(os.getcwd(), 'downloader.toml'),\n help=\"Configuration file to load settings from (default: %(default)s).\")\n parser.add_argument('-n', '--name',\n default=platform.node(),\n help=\"Host-specific name of the downloader (default: %(default)s).\")\n parser.add_argument('-p', '--print-config',\n action=\"store_true\", default=False,\n help=\"Print the configuration and exit.\")\n args = parser.parse_args()\n\n conf = Config.from_configfile(args.name, args.configfile)\n\n if args.print_config:\n print(conf)\n sys.exit(0)\n\n logging.basicConfig(format='%(levelname)-7s %(asctime)s %(message)s',\n level=logging.INFO, datefmt=\"%Y-%m-%d %H:%M:%S\")\n\n run(conf)", "def run(self, line):\r\n if os.name == 'nt':\r\n if not ctypes.windll.shell32.IsUserAnAdmin() != 0:\r\n self.app.typepath.adminpriv = False\r\n elif not os.getuid() == 0:\r\n self.app.typepath.adminpriv = False\r\n\r\n nargv = []\r\n curr = []\r\n argfound = False\r\n\r\n if \"--version\" in line or \"-V\" in line:\r\n sys.stdout.write(\"\"\"%(progname)s %(version)s\\n\"\"\" % \\\r\n {'progname': versioning.__longname__, 'version': \\\r\n versioning.__version__})\r\n sys.stdout.flush()\r\n sys.exit(self.retcode)\r\n\r\n else:\r\n for argument in enumerate(line):\r\n if not argfound and not argument[1].startswith('-'):\r\n nargv = line[argument[0]:]\r\n break\r\n else:\r\n argfound = False\r\n\r\n if argument[1] == \"-c\":\r\n argfound = True\r\n\r\n curr.append(argument[1])\r\n\r\n (self.opts, _) = self.parser.parse_args(curr)\r\n\r\n try:\r\n Encryption.encode_credentials('test')\r\n self.app.set_encode_funct(Encryption.encode_credentials)\r\n self.app.set_decode_funct(Encryption.decode_credentials)\r\n self.encoding = True\r\n except redfish.hpilo.risblobstore2.ChifDllMissingError:\r\n self.encoding = False\r\n\r\n if self.opts.config is not None and len(self.opts.config) > 0:\r\n if not os.path.isfile(self.opts.config):\r\n self.retcode = ReturnCodes.CONFIGURATION_FILE_ERROR\r\n sys.exit(self.retcode)\r\n\r\n self.app.config_file = self.opts.config\r\n\r\n self.app.config_from_file(self.app.config_file)\r\n if self.opts.logdir and self.opts.debug:\r\n logdir = self.opts.logdir\r\n else:\r\n logdir = self.app.config.get_logdir()\r\n\r\n if logdir and self.opts.debug:\r\n try:\r\n os.makedirs(logdir)\r\n except OSError as ex:\r\n if ex.errno == errno.EEXIST:\r\n pass\r\n else:\r\n raise\r\n\r\n if self.opts.debug:\r\n logfile = os.path.join(logdir, versioning.__shortname__+'.log')\r\n\r\n # Create a file logger since we got a logdir\r\n lfile = logging.FileHandler(filename=logfile)\r\n formatter = logging.Formatter(\"%(asctime)s %(levelname)s\\t: \" \\\r\n \"%(message)s\")\r\n\r\n lfile.setFormatter(formatter)\r\n lfile.setLevel(logging.DEBUG)\r\n LOGGER.addHandler(lfile)\r\n self.app.LOGGER = LOGGER\r\n\r\n cachedir = None\r\n if self.opts.nocache:\r\n self.app.config.set_cache(False)\r\n else:\r\n self.app.config.set_cachedir(os.path.join(self.opts.config_dir, \\\r\n 'cache'))\r\n cachedir = self.app.config.get_cachedir()\r\n\r\n if cachedir:\r\n try:\r\n os.makedirs(cachedir)\r\n except OSError as ex:\r\n if ex.errno == errno.EEXIST:\r\n pass\r\n else:\r\n raise\r\n\r\n if (\"login\" in line or any(x.startswith(\"--url\") for x in line) or not line)\\\r\n and not (any(x.startswith((\"-h\", \"--h\")) for x in nargv) or \"help\" in line):\r\n self.app.logout()\r\n 
else:\r\n self.app.restore()\r\n self.opts.is_redfish = self.app.updatedefinesflag(redfishflag=\\\r\n self.opts.is_redfish)\r\n\r\n if nargv:\r\n try:\r\n self.retcode = self._run_command(self.opts, nargv)\r\n if self.app.config.get_cache():\r\n if (\"logout\" not in line) and (\"--logout\" not in line):\r\n self.app.save()\r\n else:\r\n self.app.logout()\r\n except Exception as excp:\r\n self.handle_exceptions(excp)\r\n\r\n return self.retcode\r\n else:\r\n self.cmdloop(self.opts)\r\n\r\n if self.app.config.get_cache():\r\n self.app.save()\r\n else:\r\n self.app.logout()", "def main():\n arguments = docopt(__doc__, version=VERSION)\n\n # Handle the configure as a special case -- this way we won't get invalid\n # API credential messages when we're trying to configure stormpath-export.\n if arguments['configure']:\n configure()\n return\n\n exporter = StormpathExport(arguments['<base_url>'])\n exporter.export(arguments['<location>'])", "def run(args):\n pub_command = []\n sub_command = []\n\n script_dir = os.path.dirname(os.path.realpath(__file__))\n\n if not os.path.isfile(args.pub):\n print(f'Publisher executable file does not exists: {args.pub}')\n sys.exit(1)\n\n if not os.access(args.pub, os.X_OK):\n print(\n 'Publisher executable does not have execution permissions:'\n f'{args.pub}')\n\n pub_command.append(args.pub)\n\n if not os.path.isfile(args.sub):\n print(f'Subscriber executable file does not exists: {args.sub}')\n sys.exit(1)\n\n if not os.access(args.sub, os.X_OK):\n print(\n 'Subscriber executable does not have execution permissions:'\n f'{args.sub}')\n sys.exit(1)\n\n sub_command.append(args.sub)\n\n if args.xml_pub and args.xml_sub:\n if args.xml_pub:\n xml_file_pub = os.path.join(script_dir, args.xml_pub)\n if args.xml_sub:\n xml_file_sub = os.path.join(script_dir, args.xml_sub)\n else:\n print('Not provided xml configuration files.')\n sys.exit(1)\n\n pub_command.extend(['--xmlfile', xml_file_pub])\n sub_command.extend(['--xmlfile', xml_file_sub])\n\n pub_command.extend(['--seed', str(os.getpid())])\n sub_command.extend(['--seed', str(os.getpid())])\n\n if args.wait:\n pub_command.extend(['--wait', str(args.wait)])\n\n if args.samples:\n pub_command.extend(['--samples', str(args.samples)])\n sub_command.extend(['--samples', str(args.samples)])\n\n if len(args.servers) != len(args.xml_servers):\n print(\n 'Number of servers arguments should be equal to the number of xmls provided.')\n sys.exit(1)\n\n ds_procs = []\n for i in range(0, len(args.servers)):\n server_cmd = []\n\n if not os.path.isfile(args.servers[i]):\n print(f'Discovery server executable file does not exists: {args.servers[i]}')\n sys.exit(1)\n\n if not os.access(args.servers[i], os.X_OK):\n print(\n 'Discovery server executable does not have execution permissions:'\n f'{args.servers[i]}')\n sys.exit(1)\n\n server_cmd.append(args.servers[i])\n server_cmd.extend(['--xml-file', args.xml_servers[i]])\n server_cmd.extend(['--server-id', str(i)])\n\n ds_proc = subprocess.Popen(server_cmd)\n print(\n 'Running Discovery Server - commmand: ',\n ' '.join(map(str, server_cmd)))\n\n ds_procs.append(ds_proc)\n\n sub_proc = subprocess.Popen(sub_command)\n print(\n f'Running Subscriber - commmand: ',\n ' '.join(map(str, sub_command)))\n\n pub_proc = subprocess.Popen(pub_command)\n print(\n 'Running Publisher - commmand: ',\n ' '.join(map(str, pub_command)))\n\n try:\n outs, errs = sub_proc.communicate(timeout=15)\n except subprocess.TimeoutExpired:\n print('Subscriber process timed out, terminating...')\n sub_proc.kill()\n 
pub_proc.kill()\n [ds_proc.kill() for ds_proc in ds_procs]\n try:\n sys.exit(os.EX_SOFTWARE)\n except AttributeError:\n sys.exit(1)\n\n\n pub_proc.kill()\n ds_proc.kill()\n [ds_proc.kill() for ds_proc in ds_procs]\n try:\n sys.exit(os.EX_OK)\n except AttributeError:\n sys.exit(0)", "def main():\n parser = argparse.ArgumentParser()\n parser.add_argument('action', choices=['encrypt', 'decrypt'])\n parser.add_argument('environment', choices=['staging', 'prod'])\n parser.add_argument('-v', \"--verbose\", help=\"increase output verbosity\",\n action=\"store_true\")\n parser.add_argument('-f', '--folder', action='store', type=str, required=True,\n help='Output folder for results.')\n parser.add_argument('-a', '--app', action='store', type=str, required=False,\n help='Application name without spaces. This name will be appended to output files')\n args = parser.parse_args()\n encode_decode(args)\n return 0", "def load_keypass_options(subcmd, pfx):\n\n keypass_grp = subcmd.add_argument_group('Key / Password Select')\n\n kpg = keypass_grp.add_mutually_exclusive_group(required=True)\n\n kpg.add_argument(\n '--use-key', action='store_true', dest='use_key',\n help='enables usage of key for {}cryption - enter key in secure prompt or specify file with -i'.format(pfx))\n\n kpg.add_argument(\n '--use-pw', action='store_true', dest='use_pw',\n help='enables usage of password, salt, iterations for {}cryption - enter pw in secure prompt or specify file with -i'.format(pfx))\n\n load_pw_options(subcmd)", "def GenerateToolArgStrings(options):\n # Preparing dnstreexport\n dnstreeexport_array = [options.tree_export]\n dnstreeexport_array.extend(['-c', options.config_file])\n if( options.force ):\n dnstreeexport_array.append('--force')\n if( options.quiet ):\n dnstreeexport_array.append('--quiet')\n dnstreeexport_arg_string = ' '.join(dnstreeexport_array)\n\n # Preparing dnscheckconfig\n dnscheckconfig_array = [options.check_config]\n dnscheckconfig_array.extend(['-i', '%s' % options.id])\n dnscheckconfig_array.extend(['--config-file', options.config_file])\n if( options.named_checkzone ):\n dnscheckconfig_array.extend(['-z', options.named_checkzone])\n if( options.named_checkconf ):\n dnscheckconfig_array.extend(['-c', options.named_checkconf])\n if( not options.quiet ):\n dnscheckconfig_array.append('-v')\n dnscheckconfig_arg_string = ' '.join(dnscheckconfig_array)\n\n # Preparing dnsservercheck\n dnsservercheck_array = [options.server_check]\n dnsservercheck_array.extend(['--export-config'])\n dnsservercheck_array.extend(['-c', options.config_file])\n dnsservercheck_array.extend(['-i', '%s' % options.id])\n dnsservercheck_arg_string = ' '.join(dnsservercheck_array)\n\n # Preparing dnsconfigsync\n dnsconfigsync_array = [options.config_sync]\n dnsconfigsync_array.extend(['--export-config'])\n dnsconfigsync_array.extend(['-i', '%s' % options.id])\n dnsconfigsync_array.extend(['-c', options.config_file])\n if( options.ssh_id ):\n dnsconfigsync_array.extend(['--ssh-id', options.ssh_id])\n if( options.rndc_exec ):\n dnsconfigsync_array.extend(['--rndc-exec', options.rndc_exec])\n if( options.rndc_port ):\n dnsconfigsync_array.extend(['--rndc-port', options.rndc_port])\n if( options.rndc_key ):\n dnsconfigsync_array.extend(['--rndc-key', options.rndc_key])\n if( options.rndc_conf ):\n dnsconfigsync_array.extend(['--rndc-conf', options.rndc_conf])\n dnsconfigsync_arg_string = ' '.join(dnsconfigsync_array)\n\n # Preparing dnsquerycheck\n dnsquerycheck_array = [options.query_check]\n 
dnsquerycheck_array.extend(['--export-config'])\n dnsquerycheck_array.extend(['-c', options.config_file])\n dnsquerycheck_array.extend(['-i', '%s' % options.id])\n dnsquerycheck_array.extend(['-n', '%s' % options.number])\n dnsquerycheck_array.extend(['-p', '%s' % options.port])\n dnsquerycheck_arg_string = ' '.join(dnsquerycheck_array)\n\n return [dnstreeexport_arg_string,\n dnscheckconfig_arg_string,\n dnsservercheck_arg_string,\n dnsconfigsync_arg_string, \n dnsquerycheck_arg_string]", "def cli(ctx):\n if ctx.invoked_subcommand not in ['configure', 'generate_key', 'start_agent']:\n config = get_config_file()\n if config is None:\n raise click.UsageError(\"Configuration not found!\"\n \"Please run configure before first use\")", "def main():\n parser = argparse.ArgumentParser()\n parser.add_argument('--corpus_dir', required=True)\n parser.add_argument('--feature_module', required=True)\n\n args = parser.parse_args()\n corpus_dir = args.corpus_dir\n feature_module = args.feature_module\n\n return build_file(corpus_dir, feature_module)", "def cli_main():\n\n\n if len(sys.argv) > 1 and sys.argv[1].endswith('.xmind'):\n xmind_file = sys.argv[1]\n xmind_file = get_absolute_path(xmind_file)\n logging.info('Start to convert XMind file: %s', xmind_file)\n\n if len(sys.argv) == 3 and sys.argv[2] == '-json':\n testlink_json_file = xmind_testcase_to_json_file(xmind_file)\n logging.info('Convert XMind file to testcase json file successfully: %s', testlink_json_file)\n elif len(sys.argv) == 3 and sys.argv[2] == '-xml':\n testlink_xml_file = xmind_to_testlink_xml_file(xmind_file)\n logging.info('Convert XMind file to testlink xml files successfully: %s', testlink_xml_file)\n elif len(sys.argv) == 3 and sys.argv[2] == '-csv':\n zentao_csv_file = xmind_to_zentao_csv_file(xmind_file)\n logging.info('Convert XMind file to zentao csv file successfully: %s', zentao_csv_file)\n elif len(sys.argv) == 3 and sys.argv[2] == '-xlsx':\n excel_xlsx_file = xmind_to_xlsx_file(xmind_file)\n logging.info('Convert XMind file to zentao csv file successfully: %s', excel_xlsx_file)\n else:\n testlink_json_file = xmind_testcase_to_json_file(xmind_file)\n testlink_xml_file = xmind_to_testlink_xml_file(xmind_file)\n zentao_csv_file = xmind_to_zentao_csv_file(xmind_file)\n logging.info('Convert XMind file successfully: \\n'\n '1、 testcase json file(%s)\\n'\n '2、 testlink xml file(%s)\\n'\n '3、 zentao csv file(%s)',\n testlink_json_file,\n testlink_xml_file,\n zentao_csv_file)\n\n\n else:\n print(__doc__)\n logging.error('%s', __doc__)", "def create_ssl_cert_request ( ssl_hostnames ) :\n first_hostname = ssl_hostnames[ 0 ]\n csr_filename = get_ssl_csr_filename( first_hostname )\n key_filename = get_ssl_key_filename( first_hostname )\n openssl_cnf = \"\"\"\n[req]\ndistinguished_name = req_distinguished_name\nreq_extensions = san_ext\n\n[req_distinguished_name]\ncountryName_default = US\nstateOrProvinceName_default = New York\nlocalityName_default = New York\norganizationalUnitName_default = Home Box Office, Inc\ncommonName_default = \"\"\" + first_hostname + \"\"\"\n\n[san_ext]\nbasicConstraints = CA:FALSE\nkeyUsage = nonRepudiation, digitalSignature, keyEncipherment\nsubjectAltName = @sans\n\n[sans]\n\"\"\"\n counter = 0\n for hostname in ssl_hostnames :\n counter += 1\n openssl_cnf += 'DNS.' 
+ str( counter ) + ' = ' + hostname + '\\n'\n\n with open( first_hostname, 'w' ) as f :\n f.write( openssl_cnf )\n cmd = 'openssl req -new -newkey rsa:2048 -nodes -out ' + csr_filename + ' -keyout ' + key_filename\n cmd += ' -config ' + first_hostname + ' -subj \"/C=US/ST=New York/L=New York/O=Home Box Office Inc/CN=' + first_hostname + '\"'\n keygen = subprocess.call( cmd, shell = True )\n os.remove( first_hostname )\n if keygen != 0 :\n print \"Generation of SSL request failed!\"\n return None\n\n return { 'csr-filename' : csr_filename, 'key-filename' : key_filename }", "def main(progname, args):\n parser = define_options(progname)\n opts = parser.parse_args(args)\n\n # look for a provided configuration file\n config = {}\n if opts.cfgfile:\n try:\n config = read_config(opts.cfgfile)\n except EnvironmentError as ex:\n raise Failure(\"problem reading config file, {0}: {1}\"\n .format(opts.cfgfile, ex.strerror))\n\n if not opts.platform:\n opts.platform = platform_profile\n\n # build a configuration from command-line arguments (if present); the aim\n # is to build a target called 'ops' that will send an email alert\n try:\n clicfg = build_ops_config(opts)\n except ValueError as ex:\n raise Failure(exitcode=4, cause=ex)\n\n # combine configs\n if clicfg:\n config['channels'] = clicfg.get('channels', []) + \\\n config.get('channels', [])\n config['targets'] = clicfg.get('targets', []) + \\\n config.get('targets', [])\n if 'archive_targets' in clicfg:\n config['archive_targets'] = \\\n list(set(clicfg['archive_targets'] + \\\n config.get('archive_targets', [])))\n \n # adjust the configuration to send to stdout if requested\n tm = None\n if opts.stdout:\n # register the stdout channels\n tm = TargetManager()\n tm.register_channel_class(\"stdoutmail\", StdoutMailer)\n chcfg = { \"name\": \"stdoutmail\",\n \"type\": \"stdoutmail\" }\n tm.define_channel(chcfg)\n tm.register_channel_class(\"stdoutarch\", StdoutArchiver)\n chcfg = { \"name\": \"stdoutarch\",\n \"type\": \"stdoutarch\" }\n tm.define_channel(chcfg)\n\n # find the email and archive channels\n echans = set(); achans = set()\n for chan in config.get('channels', []):\n if 'name' not in chan:\n continue\n if chan.get('type') == 'email':\n echans.add(chan['name'])\n elif chan.get('type') == 'archive':\n achans.add(chan['name'])\n echans.add('email')\n achans.add('archive')\n\n # now update the targets to swap out the channels\n for target in config['targets']:\n if target.get('channel') in echans:\n target['channel'] = 'stdoutmail'\n elif target.get('channel') in achans:\n target['channel'] = 'stdoutarchive'\n\n # create the notification service\n try:\n service = NotificationService(config, targetmgr=tm)\n except ConfigurationException as ex:\n raise Failure(\"config error: \"+str(ex), 2, ex)\n\n # create the notification\n notice = create_notice(opts)\n\n # send the notification\n if not opts.targets:\n opts.targets = [opts.etarget]\n try:\n service.distribute(opts.targets, notice)\n except ValueError as ex:\n raise Failure(exitcode=3, cause=ex)", "def command_line_arguments():\n\n try:\n parser = argparse.ArgumentParser(description='Log Handler/Cleaner/Copier for Idemia DocAuth')\n\n # Add required arguments.\n parser.add_argument('action', choices=['clean', 'download'], type=str, help='clean or download')\n\n # Parse the arguments\n args = parser.parse_args()\n\n return args\n\n except Exception as err:\n print(err)\n return", "def main(args=None):", "def main(args=None):", "def load_cert_string(string, format=FORMAT_PEM):\n bio = 
BIO.MemoryBuffer(string)\n return load_cert_bio(bio, format)", "def main():\n file_name = None\n key = None\n operation = None\n\n try:\n if len(sys.argv) == 1:\n raise Exception(\"No arguement passed!\")\n opts, args = getopt.getopt(sys.argv[1:], \"f:k:o:h\", [\"help\"])\n except Exception as error:\n print(error)\n sys.exit(1)\n\n for opt, arg in opts:\n if opt in (\"-h\", \"--help\"):\n usage()\n sys.exit()\n elif opt == \"-f\":\n file_name = arg\n elif opt == \"-k\":\n try:\n with open(arg) as key_file:\n key = key_file.read()\n except Exception as error:\n print(error)\n sys.exit()\n elif opt == \"-o\":\n operation = arg\n else:\n print(\"Invalid argument passed.\")\n sys.exit(1)\n \n if file_name == None or key == None or operation == None:\n print(\"Missing argument/s!\")\n usage()\n sys.exit(1)\n\n checker = DocumentChecker(file_name, key)\n\n if operation == \"1\":\n checker.add_sign()\n elif operation == \"2\":\n checker.check()\n elif operation == \"3\":\n checker.remove_sign()\n else:\n print(\"Invalid operation.\")\n sys.exit(1)", "def __init__(self, args:argparse.Namespace):\n\t\tglobal DISTRO\n\n\t\tself.dispersion = args.dispersion if args.dispersion > 0 else 0\n\t\tself.login_dispersion = args.login_dispersion if args.login_dispersion > 0 else 0\n\t\tself.wait_for_parents = bool(args.wait_for_parents)\n\t\tself.retries = args.retries if args.retries > 0 else 0\n\t\tself.rev_proxy_disable = args.rev_proxy_disable\n\t\tself.verify = not args.insecure\n\n\t\tsetLogLevel(args.log_level)\n\n\t\tlogging.info(\"Distribution detected as: '%s'\", DISTRO)\n\n\t\tself.hostname = (platform.node().split('.')[0], platform.node())\n\t\tlogging.info(\"Hostname detected as: '%s'\", self.fullHostname)\n\n\t\ttry:\n\t\t\tself.mode = Configuration.Modes[args.Mode.upper()]\n\t\texcept KeyError as e:\n\t\t\traise ValueError(\"Unrecognized Mode: '%s'\" % args.Mode)\n\n\t\tself.tsroot = parseTSRoot(args.ts_root)\n\t\tlogging.info(\"ATS root installation directory set to '%s'\", self.tsroot)\n\n\t\tself.useSSL, self.toHost, self.toPort = parseTOURL(args.to_url, self.verify)\n\t\tself.username, self.password = args.to_user, args.to_password", "def main():\n \n # Fixed paths to pacvert\n if hasattr(sys, 'frozen'):\n pacvert.FULL_PATH = os.path.abspath(sys.executable)\n else:\n pacvert.FULL_PATH = os.path.abspath(__file__)\n\n pacvert.PROG_DIR = os.path.dirname(pacvert.FULL_PATH)\n pacvert.ARGS = sys.argv[1:]\n\n # From sickbeard\n pacvert.SYS_PLATFORM = sys.platform\n pacvert.SYS_ENCODING = None\n\n try:\n locale.setlocale(locale.LC_ALL, \"\")\n pacvert.SYS_ENCODING = locale.getpreferredencoding()\n except (locale.Error, IOError):\n pass\n\n # for OSes that are poorly configured I'll just force UTF-8\n if not pacvert.SYS_ENCODING or pacvert.SYS_ENCODING in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):\n pacvert.SYS_ENCODING = 'UTF-8'\n \n # Set up and gather command line arguments\n parser = argparse.ArgumentParser(\n description='A Python based conversion tool.')\n\n parser.add_argument(\n '-v', '--verbose', action='store_true', help='Increase console logging verbosity')\n parser.add_argument(\n '-q', '--quiet', action='store_true', help='Turn off console logging')\n parser.add_argument(\n '-d', '--daemon', action='store_true', help='Run as a daemon')\n parser.add_argument(\n '-p', '--port', type=int, help='Force pacvert to run on a specified port')\n parser.add_argument(\n '--dev', action='store_true', help='Start pacvert in the development environment')\n parser.add_argument(\n '--datadir', 
help='Specify a directory where to store your data files')\n parser.add_argument(\n '--config', help='Specify a config file to use')\n parser.add_argument(\n '--nolaunch', action='store_true', help='Prevent browser from launching on startup')\n parser.add_argument(\n '--pidfile', help='Create a pid file (only relevant when running as a daemon)')\n\n args = parser.parse_args()\n\n if args.verbose:\n pacvert.VERBOSE = True\n if args.quiet:\n pacvert.QUIET = True\n \n \n # Do an intial setup of the logger.\n logger.initLogger(console=not pacvert.QUIET, log_dir=False,\n verbose=pacvert.VERBOSE)\n \n if args.dev:\n pacvert.DEV = True\n logger.debug(u\"pacvert is running in the dev environment.\")\n\n if args.daemon:\n if sys.platform == 'win32':\n sys.stderr.write(\n \"Daemonizing not supported under Windows, starting normally\\n\")\n else:\n pacvert.DAEMON = True\n pacvert.QUIET = True\n\n if args.pidfile:\n pacvert.PIDFILE = str(args.pidfile)\n\n # If the pidfile already exists, pacvert may still be running, so\n # exit\n if os.path.exists(pacvert.PIDFILE):\n raise SystemExit(\"PID file '%s' already exists. Exiting.\" %\n pacvert.PIDFILE)\n\n # The pidfile is only useful in daemon mode, make sure we can write the\n # file properly\n if pacvert.DAEMON:\n pacvert.CREATEPID = True\n\n try:\n with open(pacvert.PIDFILE, 'w') as fp:\n fp.write(\"pid\\n\")\n except IOError as e:\n raise SystemExit(\"Unable to write PID file: %s\", e)\n else:\n logger.warn(\"Not running in daemon mode. PID file creation \" \\\n \"disabled.\")\n\n # Determine which data directory and config file to use\n if args.datadir:\n pacvert.DATA_DIR = args.datadir\n else:\n pacvert.DATA_DIR = pacvert.PROG_DIR\n\n if args.config:\n config_file = args.config\n else:\n config_file = os.path.join(pacvert.DATA_DIR, config.FILENAME)\n\n # Try to create the DATA_DIR if it doesn't exist\n if not os.path.exists(pacvert.DATA_DIR):\n try:\n os.makedirs(pacvert.DATA_DIR)\n except OSError:\n raise SystemExit(\n 'Could not create data directory: ' + pacvert.DATA_DIR + '. Exiting....')\n\n # Make sure the DATA_DIR is writeable\n if not os.access(pacvert.DATA_DIR, os.W_OK):\n raise SystemExit(\n 'Cannot write to the data directory: ' + pacvert.DATA_DIR + '. Exiting...')\n\n # Put the database in the DATA_DIR\n #pacvert.DB_FILE = os.path.join(pacvert.DATA_DIR, database.FILENAME)\n\n if pacvert.DAEMON:\n pacvert.daemonize()\n\n # Read config and start logging\n pacvert.initialize(config_file)\n\n # Start the background threads\n pacvert.start()\n\n try:\n queue_worker.start_thread()\n except:\n logger.error(u\"Main thread did exit. Wtf.\")\n\n # Force the http port if neccessary\n if args.port:\n http_port = args.port\n logger.info('Using forced web server port: %i', http_port)\n else:\n http_port = int(pacvert.CONFIG.HTTP_PORT)\n\n # Check if pyOpenSSL is installed. It is required for certificate generation\n # and for CherryPy.\n if pacvert.CONFIG.ENABLE_HTTPS:\n try:\n import OpenSSL\n except ImportError:\n logger.warn(\"The pyOpenSSL module is missing. Install this \" \\\n \"module to enable HTTPS. HTTPS will be disabled.\")\n pacvert.CONFIG.ENABLE_HTTPS = False\n\n # Try to start the server. 
Will exit here is address is already in use.\n web_config = {\n 'http_port': http_port,\n 'http_host': pacvert.CONFIG.HTTP_HOST,\n 'http_root': pacvert.CONFIG.HTTP_ROOT,\n 'http_environment': pacvert.CONFIG.HTTP_ENVIRONMENT,\n 'http_proxy': pacvert.CONFIG.HTTP_PROXY,\n 'enable_https': pacvert.CONFIG.ENABLE_HTTPS,\n 'https_cert': pacvert.CONFIG.HTTPS_CERT,\n 'https_key': pacvert.CONFIG.HTTPS_KEY,\n 'http_username': pacvert.CONFIG.HTTP_USERNAME,\n 'http_password': pacvert.CONFIG.HTTP_PASSWORD,\n 'http_basic_auth': pacvert.CONFIG.HTTP_BASIC_AUTH\n }\n\n webstart.initialize(web_config)\n\n # Wait endlessy for a signal to happen\n while True:\n if not pacvert.SIGNAL:\n try:\n time.sleep(1)\n except KeyboardInterrupt:\n pacvert.SIGNAL = 'shutdown'\n else:\n logger.info('Received signal: %s', pacvert.SIGNAL)\n\n if pacvert.SIGNAL == 'shutdown':\n pacvert.shutdown()\n elif pacvert.SIGNAL == 'restart':\n pacvert.shutdown(restart=True)\n else:\n pacvert.shutdown(restart=True, update=True)\n\n pacvert.SIGNAL = None", "def __init__(self, argv):\n tool_path = str(self.__find_tool_path().resolve())\n\n try:\n result = subprocess.run(\n [tool_path],\n stdout=subprocess.PIPE,\n universal_newlines=True\n )\n\n if result.returncode != 0:\n sys.exit(result.returncode)\n\n if (\n len(argv) == 0 or\n (len(argv) == 1 and argv[0] == '-h') or\n (len(argv) == 1 and argv[0] == '--help')\n ):\n print(self.__edit_tool_help(result.stdout))\n else:\n # Call the tool\n result = subprocess.run([tool_path] + argv)\n if result.returncode != 0:\n sys.exit(result.returncode)\n\n except KeyboardInterrupt:\n # it lets the subprocess to handle the exception\n pass\n\n except BaseException as e:\n self.__help_message += str(e)\n self.__help_message += '\\n fast-discovery-server tool not found!'\n print(self.__help_message)\n sys.exit(1)", "def init(args: Optional[List[bytes]] = None) -> None:\n warnings.warn(_deprecation_warning(), FutureWarning)\n parsed = {}\n if args:\n for arg in args:\n kv = arg.decode().split('=')\n if len(kv) == 2:\n parsed[kv[0]] = kv[1]\n collective.init(**parsed)", "def build_certifications(data_dir, output_dir):\n return yamls_to_certification.create_yaml_certifications(\n data_dir=data_dir, output_dir=output_dir\n )", "def get_certificate_command(client: KeyVaultClient, args: dict[str, Any]) -> CommandResults:\n vault_name = args.get('vault_name', '')\n certificate_name = args.get('certificate_name', '')\n certificate_version = args.get('certificate_version', '')\n response = client.get_certificate_request(\n vault_name, certificate_name, certificate_version)\n\n outputs = copy.deepcopy(response)\n outputs['attributes'] = convert_time_attributes_to_iso(outputs['attributes'])\n outputs['policy']['attributes'] = convert_time_attributes_to_iso(outputs['policy']['attributes'])\n\n readable_response = {'certificate_id': response.get(\n 'id'), **convert_attributes_to_readable(response.get('attributes', {}).copy())}\n outputs[VAULT_NAME_CONTEXT_FIELD] = vault_name\n\n readable_output = tableToMarkdown(f'{certificate_name} Information',\n readable_response,\n ['certificate_id', 'enabled', 'create_time', 'update_time', 'expiry_time'],\n removeNull=True,\n headerTransform=string_to_table_header)\n command_results = CommandResults(\n outputs_prefix='AzureKeyVault.Certificate',\n outputs_key_field='id',\n outputs=outputs,\n raw_response=response,\n readable_output=readable_output,\n ignore_auto_extract=True\n )\n\n return command_results", "def init_parser() -> argparse.ArgumentParser:\n parser = 
argparse.ArgumentParser(\n description='proxy.py v%s' % __version__,\n epilog='Proxy.py not working? Report at: %s/issues/new' % __homepage__\n )\n # Argument names are ordered alphabetically.\n parser.add_argument(\n '--backlog',\n type=int,\n default=DEFAULT_BACKLOG,\n help='Default: 100. Maximum number of pending connections to proxy server')\n parser.add_argument(\n '--basic-auth',\n type=str,\n default=DEFAULT_BASIC_AUTH,\n help='Default: No authentication. Specify colon separated user:password '\n 'to enable basic authentication.')\n parser.add_argument(\n '--ca-key-file',\n type=str,\n default=DEFAULT_CA_KEY_FILE,\n help='Default: None. CA key to use for signing dynamically generated '\n 'HTTPS certificates. If used, must also pass --ca-cert-file and --ca-signing-key-file'\n )\n parser.add_argument(\n '--ca-cert-dir',\n type=str,\n default=DEFAULT_CA_CERT_DIR,\n help='Default: ~/.proxy.py. Directory to store dynamically generated certificates. '\n 'Also see --ca-key-file, --ca-cert-file and --ca-signing-key-file'\n )\n parser.add_argument(\n '--ca-cert-file',\n type=str,\n default=DEFAULT_CA_CERT_FILE,\n help='Default: None. Signing certificate to use for signing dynamically generated '\n 'HTTPS certificates. If used, must also pass --ca-key-file and --ca-signing-key-file'\n )\n parser.add_argument(\n '--ca-signing-key-file',\n type=str,\n default=DEFAULT_CA_SIGNING_KEY_FILE,\n help='Default: None. CA signing key to use for dynamic generation of '\n 'HTTPS certificates. If used, must also pass --ca-key-file and --ca-cert-file'\n )\n parser.add_argument(\n '--cert-file',\n type=str,\n default=DEFAULT_CERT_FILE,\n help='Default: None. Server certificate to enable end-to-end TLS encryption with clients. '\n 'If used, must also pass --key-file.'\n )\n parser.add_argument(\n '--client-recvbuf-size',\n type=int,\n default=DEFAULT_CLIENT_RECVBUF_SIZE,\n help='Default: 1 MB. Maximum amount of data received from the '\n 'client in a single recv() operation. Bump this '\n 'value for faster uploads at the expense of '\n 'increased RAM.')\n parser.add_argument(\n '--devtools-ws-path',\n type=str,\n default=DEFAULT_DEVTOOLS_WS_PATH,\n help='Default: /devtools. Only applicable '\n 'if --enable-devtools is used.'\n )\n parser.add_argument(\n '--disable-headers',\n type=str,\n default=COMMA.join(DEFAULT_DISABLE_HEADERS),\n help='Default: None. Comma separated list of headers to remove before '\n 'dispatching client request to upstream server.')\n parser.add_argument(\n '--disable-http-proxy',\n action='store_true',\n default=DEFAULT_DISABLE_HTTP_PROXY,\n help='Default: False. Whether to disable proxy.HttpProxyPlugin.')\n parser.add_argument(\n '--enable-devtools',\n action='store_true',\n default=DEFAULT_ENABLE_DEVTOOLS,\n help='Default: False. Enables integration with Chrome Devtool Frontend.'\n )\n parser.add_argument(\n '--enable-static-server',\n action='store_true',\n default=DEFAULT_ENABLE_STATIC_SERVER,\n help='Default: False. Enable inbuilt static file server. '\n 'Optionally, also use --static-server-dir to serve static content '\n 'from custom directory. By default, static file server serves '\n 'from public folder.'\n )\n parser.add_argument(\n '--enable-web-server',\n action='store_true',\n default=DEFAULT_ENABLE_WEB_SERVER,\n help='Default: False. Whether to enable proxy.HttpWebServerPlugin.')\n parser.add_argument('--hostname',\n type=str,\n default=str(DEFAULT_IPV6_HOSTNAME),\n help='Default: ::1. 
Server IP address.')\n parser.add_argument(\n '--key-file',\n type=str,\n default=DEFAULT_KEY_FILE,\n help='Default: None. Server key file to enable end-to-end TLS encryption with clients. '\n 'If used, must also pass --cert-file.'\n )\n parser.add_argument(\n '--log-level',\n type=str,\n default=DEFAULT_LOG_LEVEL,\n help='Valid options: DEBUG, INFO (default), WARNING, ERROR, CRITICAL. '\n 'Both upper and lowercase values are allowed. '\n 'You may also simply use the leading character e.g. --log-level d')\n parser.add_argument('--log-file', type=str, default=DEFAULT_LOG_FILE,\n help='Default: sys.stdout. Log file destination.')\n parser.add_argument('--log-format', type=str, default=DEFAULT_LOG_FORMAT,\n help='Log format for Python logger.')\n parser.add_argument('--num-workers', type=int, default=DEFAULT_NUM_WORKERS,\n help='Defaults to number of CPU cores.')\n parser.add_argument(\n '--open-file-limit',\n type=int,\n default=DEFAULT_OPEN_FILE_LIMIT,\n help='Default: 1024. Maximum number of files (TCP connections) '\n 'that proxy.py can open concurrently.')\n parser.add_argument(\n '--pac-file',\n type=str,\n default=DEFAULT_PAC_FILE,\n help='A file (Proxy Auto Configuration) or string to serve when '\n 'the server receives a direct file request. '\n 'Using this option enables proxy.HttpWebServerPlugin.')\n parser.add_argument(\n '--pac-file-url-path',\n type=str,\n default=text_(DEFAULT_PAC_FILE_URL_PATH),\n help='Default: %s. Web server path to serve the PAC file.' %\n text_(DEFAULT_PAC_FILE_URL_PATH))\n parser.add_argument(\n '--pid-file',\n type=str,\n default=DEFAULT_PID_FILE,\n help='Default: None. Save parent process ID to a file.')\n parser.add_argument(\n '--plugins',\n type=str,\n default=DEFAULT_PLUGINS,\n help='Comma separated plugins')\n parser.add_argument('--port', type=int, default=DEFAULT_PORT,\n help='Default: 8899. Server port.')\n parser.add_argument(\n '--server-recvbuf-size',\n type=int,\n default=DEFAULT_SERVER_RECVBUF_SIZE,\n help='Default: 1 MB. Maximum amount of data received from the '\n 'server in a single recv() operation. Bump this '\n 'value for faster downloads at the expense of '\n 'increased RAM.')\n parser.add_argument(\n '--static-server-dir',\n type=str,\n default=DEFAULT_STATIC_SERVER_DIR,\n help='Default: ' + DEFAULT_STATIC_SERVER_DIR + '. Static server root directory. '\n 'This option is only applicable when static server is also enabled. 
'\n 'See --enable-static-server.'\n )\n parser.add_argument(\n '--version',\n '-v',\n action='store_true',\n default=DEFAULT_VERSION,\n help='Prints proxy.py version.')\n return parser", "def main():\n optparser = initOpts()\n (options, args) = optparser.parse_args()\n\n output = StringIO.StringIO()\n \n assembleRDF(file(options.licenses_xml), output, options.verbose)\n\n if options.output_rdf:\n file(options.output_rdf, 'w').write(output.getvalue())\n else:\n print output.getvalue()", "def CreateRequests(self, args):\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, self.resources)\n certificate = file_utils.ReadFile(args.certificate, 'certificate')\n private_key = file_utils.ReadFile(args.private_key, 'private key')\n\n request = self.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=self.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n project=self.project)\n\n return [request]", "def print_usage():\n\n print \"Usage:\"\n print \" %s sign <priv_key> <cert> <file1 ...>\" % sys.argv[0]\n print \" %s verify <trust_dir> <file1 ...>\" % sys.argv[0]\n sys.exit(1)", "def parse_args():\n parser = argparse.ArgumentParser(\n description='''\n {nm}: TCP over TLS server to accept requests.\\n\n '''.format(nm=sys.argv[0]))\n parser.add_argument('-p',\n '--port',\n help='Server port to connect to, defaults to \"9999\".',\n required=False,\n default='9999')\n parser.add_argument('-c',\n '--cert',\n help='Server certificate file with path,'\n ' defaults to \"server.pem\" in current directory.',\n required=False,\n default='server.pem')\n parser.add_argument('-k',\n '--key',\n help='Server certificate key file with path,'\n ' defaults to \"server.key\" in current directory.',\n required=False,\n default='server.key')\n parser.add_argument('-ca',\n '--cert-auth',\n help='CA certificate file with path,'\n ' defaults to \"ca_cert.pem\" in current directory.',\n required=False,\n dest='ca_cert',\n default='ca_cert.pem')\n parser.add_argument('--log-level',\n help='Logger level, defaults to \"DEBUG\"',\n required=False,\n default='DEBUG')\n return vars(parser.parse_args())", "def load_x509_cert(url, httpc, spec2key, **get_args):\n try:\n r = httpc(\"GET\", url, allow_redirects=True, **get_args)\n if r.status_code == 200:\n cert = str(r.text)\n try:\n public_key = spec2key[cert] # If I've already seen it\n except KeyError:\n public_key = import_public_key_from_pem_data(cert)\n spec2key[cert] = public_key\n\n if isinstance(public_key, rsa.RSAPublicKey):\n return {\"rsa\": public_key}\n elif isinstance(public_key, ec.EllipticCurvePublicKey):\n return {\"ec\": public_key}\n else:\n raise Exception(\"HTTP Get error: %s\" % r.status_code)\n except Exception as err: # not a RSA key\n logger.warning(\"Can't load key: %s\" % err)\n return []", "def _load_ssl(self, ssl_options: tuple):\n try:\n self._ssl.load_cert_chain(certfile=ssl_options[0], keyfile=ssl_options[1], password=ssl_options[2])\n except IOError as e:\n self.logger.error(\"Unable to load certificate files: {}\".format(e))\n self.stop()", "def ca():\n return trustme.CA()", "def handle_cmdline_args():\n\n parser = argparse.ArgumentParser(\n description='Generate synthetic data from a specification in a json '\n 'file using the \"synth-method\" described in the json file. ')\n\n parser.add_argument(\n '-i', dest='infile', required=True,\n help='The input json file. 
Must contain a \"synth-method\" property')\n\n parser.add_argument(\n '-o', dest='outfile_prefix', required=True, help='The prefix of the output paths (data json and csv), relative to the QUIPP-pipeline root directory')\n\n args = parser.parse_args()\n return args", "def main(argv):\n usage = \"\"\"consul_records.py -c <client-id> -p <client-secret> -s <subscription> -d <domain> -r <resource_group> -z <zone> <consul_dns_servers...>\n\n Args:\n -c <client-id>: Service Principal client id\n -p <client-secret>: Service Principal client secret\n -s <subscription>: Subscription Id for the target Private DNS zone\n -d <domain>: DNS domain\n -z <zone>: Private DNS zone\n -r <resource_group>: Resource Group for the Private DNS zone\n <consul_dns_servers>: Space-separated list of Consul servers that serve the domain (use all 3 consul servers, they could be out of sync) \n\n \"\"\"\n client, secret, subscription, domain, resource_group, zone = [None, None, None, None, None, None]\n servers = []\n try:\n opts, args = getopt.getopt(\n argv, \"hc:p:s:d:r:z:\", [\"client=\", \"secret=\", \"subscription=\", \"domain=\", \"resource_group=\", \"zone=\"])\n except getopt.GetoptError:\n print(usage)\n sys.exit(2)\n for opt, arg in opts:\n if opt == '-h':\n print(usage)\n sys.exit()\n elif opt in (\"-c\", \"--client\"):\n client = arg\n elif opt in (\"-p\", \"--secret\"):\n secret = arg\n elif opt in (\"-s\", \"--subscription\"):\n subscription = arg\n elif opt in (\"-d\", \"--domain\"):\n domain = arg\n elif opt in (\"-r\", \"--resource-group\"):\n resource_group = arg\n elif opt in (\"-z\", \"--zone\"):\n zone = arg\n for arg in args:\n servers.append(arg)\n\n print(\"client={}, subscription={}, domain={}, resource_group={}, zone={}, servers={}\".format(\n client, subscription, domain, resource_group, zone, \",\".join(servers)))\n\n if not client or not secret or not subscription or not domain or not resource_group or not zone:\n print(usage)\n sys.exit(2)\n\n registration_client = RecordRegistrationClient(default_tenant_id, subscription, client, secret)\n records = load_records(servers)\n print(\"Loaded {} A records + {} CNAME records from source.\".format(len(records['A']), len(records['CNAME'])))\n registration_client.register_all(resource_group, zone, records)", "def setup():\n\n generators = {\"man\": gen_manpage, \"cpl\": gen_completions}\n\n prsr = argparse.ArgumentParser(\n description=\"xNVMe CLI Bash-completions and man page generator\",\n formatter_class=argparse.ArgumentDefaultsHelpFormatter,\n )\n prsr.add_argument(\n \"generator\",\n help=\"Generator to run\",\n default=sorted(generators.keys())[0],\n choices=sorted(generators.keys()),\n )\n prsr.add_argument(\n \"--tools\",\n nargs=\"*\",\n help=\"Name of tools to generate bash-completions for\",\n )\n prsr.add_argument(\n \"--output\",\n help=\"Path to directory in which to emit completion scripts\",\n default=os.sep.join([\".\"]),\n )\n prsr.add_argument(\n \"--log-level\",\n help=\"log-devel\",\n default=\"INFO\",\n choices=[\"DEBUG\", \"INFO\", \"WARNING\", \"ERROR\", \"CRITICAL\"],\n )\n\n args = prsr.parse_args()\n args.output = expand_path(args.output)\n args.gen = generators[args.generator]\n\n if not args.tools:\n args.tools = find_binaries()\n\n logging.basicConfig(\n format=\"%(asctime)s %(message)s\",\n level=getattr(logging, args.log_level.upper(), None),\n )\n\n return args", "def parse_command_line_args():\n parser = argparse.ArgumentParser(description=(\n 'HYAKUYOBAKO Data sender.'))\n parser.add_argument(\n '--project_id', 
required=True, help='GCP cloud project name')\n parser.add_argument(\n '--registry_id', required=True, help='Cloud IoT Core registry id')\n parser.add_argument(\n '--device_id', required=True, help='Cloud IoT Core device id')\n parser.add_argument(\n '--private_key_file',\n required=True,\n help='Path to private key file.')\n parser.add_argument(\n '--algorithm',\n choices=('RS256', 'ES256'),\n required=True,\n help='The encryption algorithm to use to generate the JWT.')\n parser.add_argument(\n '--cloud_region', default='us-central1', help='GCP cloud region')\n parser.add_argument(\n '--ca_certs',\n default='roots.pem',\n help=('CA root from https://pki.google.com/roots.pem'))\n parser.add_argument(\n '--message_type',\n choices=('event', 'state'),\n default='event',\n required=True,\n help=('Indicates whether the message to be published is a '\n 'telemetry event or a device state message.'))\n parser.add_argument(\n '--base_url',\n default=_BASE_URL,\n help=('Base URL for the Cloud IoT Core Device Service API'))\n parser.add_argument(\n '--jwt_expires_minutes',\n default=20,\n type=int,\n help=('Expiration time, in minutes, for JWT tokens.'))\n parser.add_argument(\n '--id',\n default=999,\n type=int,\n help=('Device id, not IoT Core device id for unique key.'))\n parser.add_argument(\n '--location_logitude',\n default=0.0,\n type=float,\n help=('Logitude of this deice. ex)35.658581'))\n parser.add_argument(\n '--location_latitude',\n default=0.0,\n type=float,\n help=('Latitude of this deice. ex)139.745433'))\n\n return parser.parse_args()", "def cli() -> object:\n parser = argparse.ArgumentParser(description=\"Expression Compiler\")\n parser.add_argument(\"sourcefile\", type=argparse.FileType('r'),\n help=\"Source program text\")\n parser.add_argument(\"outfile\", type=argparse.FileType('w'),\n nargs=\"?\", default=sys.stdout,\n help=\"Output file for assembly code\")\n args = parser.parse_args()\n return args", "def cli(configfile):\n if path.exists(configfile):\n load_config(\"aplet.yml\")\n pass", "def main():\n try:\n arguments = docopt(__doc__)\n house = arguments['--house']\n character = arguments['--character']\n book = arguments['--books']\n if house:\n get_house(house)\n if character:\n get_character(character)\n if book:\n get_book(book)\n\n except DocoptExit as e:\n print e.message", "def loaded_certificates(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['LoadedCertificateArgs']]]]:\n return pulumi.get(self, \"loaded_certificates\")" ]
[ "0.65129656", "0.6317035", "0.62203246", "0.6165697", "0.614384", "0.58114463", "0.5741986", "0.57261765", "0.57255286", "0.5722998", "0.5697117", "0.56766486", "0.5666311", "0.56107074", "0.5607905", "0.5587706", "0.55837715", "0.55678767", "0.54588795", "0.5440277", "0.54250616", "0.5407741", "0.53784996", "0.5377396", "0.535285", "0.53482795", "0.5296065", "0.52838963", "0.52624434", "0.5262231", "0.52558696", "0.5254619", "0.52084553", "0.5196903", "0.5196903", "0.51744753", "0.5165904", "0.51635605", "0.5162618", "0.51582754", "0.5156742", "0.5156095", "0.51382816", "0.50866395", "0.5084712", "0.5072637", "0.50650394", "0.5061021", "0.5061021", "0.5041444", "0.50382906", "0.50348204", "0.50198174", "0.5015919", "0.50139976", "0.4995452", "0.499379", "0.4984251", "0.498405", "0.49834985", "0.49800655", "0.49746323", "0.49693748", "0.49676076", "0.49673387", "0.49620718", "0.49500075", "0.49474382", "0.4936268", "0.49289608", "0.49230054", "0.49209797", "0.49027961", "0.48979962", "0.48932362", "0.48932362", "0.48922858", "0.48887298", "0.48745602", "0.48651496", "0.48632538", "0.48588887", "0.48499548", "0.48498926", "0.48432484", "0.48419103", "0.4828449", "0.48247585", "0.48143783", "0.48089856", "0.48083067", "0.4806417", "0.48058572", "0.48019394", "0.48005506", "0.48005024", "0.47942042", "0.4791159", "0.4787919", "0.47830284" ]
0.5525959
18
Load command-line arguments, create self-signed CRT.
def selfsign_command(args):
    if args.files:
        die("Unexpected positional arguments")

    subject_info = info_from_args(args)

    if subject_info.ca:
        msg('Request for CA cert')
    else:
        msg('Request for end-entity cert')
    subject_info.show(msg_show)

    # Load private key, create signing request
    key = load_key(args.key, load_password(args.password_file))
    subject_csr = create_x509_req(key, subject_info)

    # sign created request
    cert = do_sign(subject_csr, subject_csr, key, args.days, args.path_length, '<selfsign>')
    do_output(cert_to_pem(cert), args, 'x509')
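The helpers referenced above (load_key, create_x509_req, do_sign, cert_to_pem, do_output) are not defined in this row, so as an illustrative aside only, a minimal self-contained sketch of the same self-sign flow written against the cryptography package might look like the following; selfsign_sketch, its parameters, and "example.internal" are hypothetical names, not part of the original tool:

import datetime

from cryptography import x509
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.x509.oid import NameOID


def selfsign_sketch(common_name, days):
    # Generate a fresh EC key in place of load_key(args.key, ...).
    key = ec.generate_private_key(ec.SECP256R1())

    # Self-signed means subject and issuer are the same name, and the
    # certificate is signed with the subject's own private key.
    name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, common_name)])
    now = datetime.datetime.now(datetime.timezone.utc)
    cert = (
        x509.CertificateBuilder()
        .subject_name(name)
        .issuer_name(name)
        .public_key(key.public_key())
        .serial_number(x509.random_serial_number())
        .not_valid_before(now)
        .not_valid_after(now + datetime.timedelta(days=days))
        .sign(key, hashes.SHA256())
    )
    # Roughly what cert_to_pem(cert) would return in the snippet above.
    return cert.public_bytes(serialization.Encoding.PEM)


if __name__ == "__main__":
    # Example: a 30-day self-signed certificate for a made-up host name.
    print(selfsign_sketch("example.internal", 30).decode())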
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def run(self, line):\r\n if os.name == 'nt':\r\n if not ctypes.windll.shell32.IsUserAnAdmin() != 0:\r\n self.app.typepath.adminpriv = False\r\n elif not os.getuid() == 0:\r\n self.app.typepath.adminpriv = False\r\n\r\n nargv = []\r\n curr = []\r\n argfound = False\r\n\r\n if \"--version\" in line or \"-V\" in line:\r\n sys.stdout.write(\"\"\"%(progname)s %(version)s\\n\"\"\" % \\\r\n {'progname': versioning.__longname__, 'version': \\\r\n versioning.__version__})\r\n sys.stdout.flush()\r\n sys.exit(self.retcode)\r\n\r\n else:\r\n for argument in enumerate(line):\r\n if not argfound and not argument[1].startswith('-'):\r\n nargv = line[argument[0]:]\r\n break\r\n else:\r\n argfound = False\r\n\r\n if argument[1] == \"-c\":\r\n argfound = True\r\n\r\n curr.append(argument[1])\r\n\r\n (self.opts, _) = self.parser.parse_args(curr)\r\n\r\n try:\r\n Encryption.encode_credentials('test')\r\n self.app.set_encode_funct(Encryption.encode_credentials)\r\n self.app.set_decode_funct(Encryption.decode_credentials)\r\n self.encoding = True\r\n except redfish.hpilo.risblobstore2.ChifDllMissingError:\r\n self.encoding = False\r\n\r\n if self.opts.config is not None and len(self.opts.config) > 0:\r\n if not os.path.isfile(self.opts.config):\r\n self.retcode = ReturnCodes.CONFIGURATION_FILE_ERROR\r\n sys.exit(self.retcode)\r\n\r\n self.app.config_file = self.opts.config\r\n\r\n self.app.config_from_file(self.app.config_file)\r\n if self.opts.logdir and self.opts.debug:\r\n logdir = self.opts.logdir\r\n else:\r\n logdir = self.app.config.get_logdir()\r\n\r\n if logdir and self.opts.debug:\r\n try:\r\n os.makedirs(logdir)\r\n except OSError as ex:\r\n if ex.errno == errno.EEXIST:\r\n pass\r\n else:\r\n raise\r\n\r\n if self.opts.debug:\r\n logfile = os.path.join(logdir, versioning.__shortname__+'.log')\r\n\r\n # Create a file logger since we got a logdir\r\n lfile = logging.FileHandler(filename=logfile)\r\n formatter = logging.Formatter(\"%(asctime)s %(levelname)s\\t: \" \\\r\n \"%(message)s\")\r\n\r\n lfile.setFormatter(formatter)\r\n lfile.setLevel(logging.DEBUG)\r\n LOGGER.addHandler(lfile)\r\n self.app.LOGGER = LOGGER\r\n\r\n cachedir = None\r\n if self.opts.nocache:\r\n self.app.config.set_cache(False)\r\n else:\r\n self.app.config.set_cachedir(os.path.join(self.opts.config_dir, \\\r\n 'cache'))\r\n cachedir = self.app.config.get_cachedir()\r\n\r\n if cachedir:\r\n try:\r\n os.makedirs(cachedir)\r\n except OSError as ex:\r\n if ex.errno == errno.EEXIST:\r\n pass\r\n else:\r\n raise\r\n\r\n if (\"login\" in line or any(x.startswith(\"--url\") for x in line) or not line)\\\r\n and not (any(x.startswith((\"-h\", \"--h\")) for x in nargv) or \"help\" in line):\r\n self.app.logout()\r\n else:\r\n self.app.restore()\r\n self.opts.is_redfish = self.app.updatedefinesflag(redfishflag=\\\r\n self.opts.is_redfish)\r\n\r\n if nargv:\r\n try:\r\n self.retcode = self._run_command(self.opts, nargv)\r\n if self.app.config.get_cache():\r\n if (\"logout\" not in line) and (\"--logout\" not in line):\r\n self.app.save()\r\n else:\r\n self.app.logout()\r\n except Exception as excp:\r\n self.handle_exceptions(excp)\r\n\r\n return self.retcode\r\n else:\r\n self.cmdloop(self.opts)\r\n\r\n if self.app.config.get_cache():\r\n self.app.save()\r\n else:\r\n self.app.logout()", "def cli_arguments():\n\n parser = argparse.ArgumentParser(\n formatter_class=argparse.RawDescriptionHelpFormatter,\n usage=f\"\\n{Color.DETAIL}pdforce.py [-p <pdf>] [-w <wordlist>] [-e <encoding>] [-o <output>] [-c] [-h/--help]{Color.END}\",\n 
description=f\"{Color.EMPHASIS}{TITLE}\\nLightweight PDF password cracker. USE FOR LEGAL INTENTS ONLY.{Color.END}\",\n epilog=f\"{Color.EMPHASIS}Made by @poponealex - https://github.com/poponealex{Color.END}\",\n )\n\n parser.add_argument(\n \"-p\",\n \"--pdf\",\n type=str,\n help=f\"{Color.INFORMATION}Path to the pdf file.{Color.END}\",\n action=\"store\",\n default=\"\",\n )\n\n parser.add_argument(\n \"-w\",\n \"--wordlist\",\n type=str,\n help=f\"{Color.INFORMATION}Path to the wordlist.{Color.END}\",\n action=\"store\",\n default=\"\",\n )\n\n parser.add_argument(\n \"-e\",\n \"--encoding\",\n type=str,\n help=f\"{Color.INFORMATION}Specify an encoding for the wordlist (https://docs.python.org/3/library/codecs.html#standard-encodings). The default encoding is platform dependent. Use 'iso8859_1' for rockyou. {Color.END}\",\n action=\"store\",\n default=None,\n )\n\n parser.add_argument(\n \"-o\",\n \"--output\",\n help=f\"{Color.INFORMATION}Output the cracked password to a new file.{Color.END}\",\n action=\"store\",\n )\n\n parser.add_argument(\n \"-c\",\n \"--copy\",\n help=f\"{Color.INFORMATION}Copy the password to the clipboard.{Color.END}\",\n action=\"store_true\",\n )\n\n return parser.parse_args()", "def main(args=None):", "def main(args=None):", "def main(args):", "def main(args):", "def initialise(self, args, environ):", "def main():\n licensify(_parse_args())", "def main(self, args=None, **extra):\n if args is None:\n args = get_os_args()\n return super().main(args=preprocess_argument_list(args), **extra)", "def entrypoint_wrapper(argc, argv):\n list = [\"\"] * argc\n i = 0\n while i < argc:\n list[i] = rffi.charp2str(argv[i])\n i += 1\n return entrypoint(list)", "def process_args(args):\r\n has_special_chars = False if '-an' in args else True\r\n is_case_sensitive = False if '-ci' in args else True\r\n\r\n if '-l' in args:\r\n return RndmPswdGen(pw_len=int(args[args.index('-l') + 1]),\r\n is_spcl=has_special_chars,\r\n is_case_snstv=is_case_sensitive)\r\n return RndmPswdGen(is_spcl=has_special_chars,\r\n is_case_snstv=is_case_sensitive)", "def setup_request_commandline() -> Request:\r\n parser = argparse.ArgumentParser()\r\n parser.add_argument(\"key\", help=\"The key to use when encrypting or \"\r\n \"decrypting. This needs to be of \"\r\n \"length 8, 16 or 24\")\r\n parser.add_argument(\"-s\", \"--string\", help=\"The string that needs to be \"\r\n \"encrypted or decrypted\")\r\n parser.add_argument(\"-f\", \"--file\", help=\"The text file that needs to be\"\r\n \"encrypted or decrypted\")\r\n parser.add_argument(\"-o\", \"--output\", default=\"print\",\r\n help=\"The output of the program. This is 'print' by \"\r\n \"default, but can be set to a file name as well.\")\r\n parser.add_argument(\"-m\", \"--mode\", default=\"en\",\r\n help=\"The mode to run the program in. If 'en' (default)\"\r\n \" then the program will encrypt, 'de' will cause \"\r\n \"the program to decrypt\")\r\n try:\r\n args = parser.parse_args()\r\n request = Request()\r\n request.encryption_state = CryptoMode(args.mode)\r\n request.data_input = args.string\r\n request.input_file = args.file\r\n request.output = args.output\r\n request.key = args.key\r\n print(request)\r\n return request\r\n except Exception as e:\r\n print(f\"Error! 
Could not read arguments.\\n{e}\")\r\n quit()", "def main(args=None):\n pass", "def cli(args): # noqa; pylint: disable=unused-argument", "def _set_arguments(self):\n cert_location = f\"dependencies{sep}certificates{sep}localuser.crt\"\n key_location = f\"dependencies{sep}certificates{sep}localuser.key\"\n assert Path(cert_location).exists(), (\n f\"The certificate isn't \"\n f\"present at location {Path(cert_location).absolute()}\"\n )\n assert Path(key_location).exists(), (\n f\"The certificate key isn't \"\n f\"present at location {Path(key_location).absolute()}\"\n )\n self._arguments = [\n (\n \"test-certificate-verify\",\n [\"-k\", key_location, \"-c\", cert_location],\n ),\n (\n \"test-sig-algs\",\n [],\n ),\n (\n \"test-clienthello-md5\",\n [],\n ),\n (\n \"test-tls13-pkcs-signature\",\n [],\n ),\n ]", "def command_line_arguments():\n\n try:\n parser = argparse.ArgumentParser(description='Log Handler/Cleaner/Copier for Idemia DocAuth')\n\n # Add required arguments.\n parser.add_argument('action', choices=['clean', 'download'], type=str, help='clean or download')\n\n # Parse the arguments\n args = parser.parse_args()\n\n return args\n\n except Exception as err:\n print(err)\n return", "def main(args):\n cli = CLI()\n # Check arguments\n cli.parse_arguments(args)", "def main() -> None:\n init(args=sys.argv[1:])", "def setup_request_commandline() -> Request:\n parser = argparse.ArgumentParser()\n parser.add_argument(\"key\", help=\"The key to use when encrypting or \"\n \"decrypting. This needs to be of \"\n \"length 8, 16 or 24\")\n parser.add_argument(\"-s\", \"--string\", help=\"The string that needs to be \"\n \"encrypted or decrypted\")\n parser.add_argument(\"-f\", \"--file\", help=\"The text file that needs to be\"\n \"encrypted or decrypted\")\n parser.add_argument(\"-o\", \"--output\", default=\"print\",\n help=\"The output of the program. This is 'print' by \"\n \"default, but can be set to a file name as well.\")\n parser.add_argument(\"-m\", \"--mode\", default=\"en\",\n help=\"The mode to run the program in. If 'en' (default)\"\n \" then the program will encrypt, 'de' will cause \"\n \"the program to decrypt\")\n try:\n args = parser.parse_args()\n request = Request()\n request.encryption_state = CryptoMode(args.mode)\n request.data_input = args.string\n request.input_file = args.file\n request.output = args.output\n request.key = args.key\n print(request)\n return request\n except Exception as e:\n print(f\"Error! Could not read arguments.\\n{e}\")\n quit()", "def configure_commandline(cmdline_arguments: argparse.Namespace) -> Optional[Text]:", "def init_args():\n parser = argparse.ArgumentParser(\n description=\"DeltaSherlock Client software.\")\n parser.add_argument('-v', '--version', action='version', version=VERSION)\n parser.add_argument('-c', '--config', action='store', dest='config_file',\n default='./config.ini', help=\"Path to config file. [default: \\\n %(default)s]\")\n parser.add_argument('-d', '--daemon', action='store_true', dest='daemon',\n default=False, help=\"Run in daemon mode. 
[default: \\\n %(default)s]\")\n return parser.parse_args()", "def cli():\n config, auth, execute_now = read_command_line_arguments()\n main(config, auth, execute_now)", "def _set_runtime_infos(args):\n import os\n runtime = cc.view('_runtime')\n runtime.set('command', args.launcherid)\n runtime.set('reloader', args.use_reloader)\n cc.set('absoluteDir', os.path.abspath(cc.get('dir')) + '/')", "def add_args(parser):\n add_encoder_args(parser)\n add_decoder_args(parser)", "def test_main_arguments():\n args = argparse.Namespace(url=RANDOM_URL,\n username=RANDOM_USERNAME,\n password=RANDOM_PASSWORD,\n tenantcode=RANDOM_TENANTCODE)\n result = Config(\"wso_args.json\").main(args)\n\n assert result is True", "def main():\n args = parse_args()\n process_args(args)", "def cmd_entry():\n import argparse\n\n parser = argparse.ArgumentParser(\n description=\"Web based frontend to the health record system databaser\"\n )\n parser.add_argument('-c', '--config', required=True, help=\"Config file to load\")\n args = parser.parse_args()\n\n main(args.config)", "def run():\n if len(sys.argv)<2:\n sys.argv.append(\"-h\")\n plac.call(main)", "def _initialize_from_cookiecutter_args(self, cookiecutter_args: dict[str, str]):\n self.golden_tests = cookiecutter_args[\"add_golden\"] == \"y\"\n self.github_owner = cookiecutter_args[\"github_owner\"]\n # Allow copyright holder and copyright year to be missing in the cookiecutter\n # args. Fallback to VSHN AG <[email protected]> and the current year here.\n self.copyright_holder = cookiecutter_args.get(\n \"copyright_holder\", \"VSHN AG <[email protected]>\"\n )\n self.copyright_year = cookiecutter_args.get(\"copyright_year\")\n if \"test_cases\" in cookiecutter_args:\n self.test_cases = cookiecutter_args[\"test_cases\"].split(\" \")\n else:\n self.test_cases = [\"defaults\"]\n\n return False", "def load_args(init_regs: Registers, memory: Memory, args: List[str]):\n init_regs[\"$a0\"] = len(args) # argc\n\n argv: List[int] = []\n for arg in args:\n ptr = memory.extend_stack(bytesify(arg))\n argv.append(ptr)\n\n argv.append(0)\n\n for idx, ptr in enumerate(argv[::-1]):\n memory.extend_stack(bytesify(ptr, size=4), align_data=True)\n\n init_regs[\"$a1\"] = memory.ram[\"stack\"][\"stops\"] # argv", "def main(argv):\n\n\n parser = argparse.ArgumentParser(description='convert der to raw')\n parser.add_argument('-s','--secretkey_file', help='Secret key', required=True)\n parser.add_argument('-p','--publickey_file', help='Public key', required=True)\n args = parser.parse_args()\n\n secretkey_file = args.secretkey_file\n publickey_file = args.publickey_file\n\n\n privkey = SigningKey.from_der(open(secretkey_file).read())\n pubkey = VerifyingKey.from_der(open(publickey_file).read())\n\n open(secretkey_file[0:-4] + \".bin\", \"wb\").write(privkey.to_string())\n open(publickey_file[0:-4] + \".bin\", \"wb\").write(pubkey.to_string())", "def Run(self, args):\n holder = base_classes.ComputeApiHolder(self.ReleaseTrack())\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, holder.resources, default_scope=compute_scope.ScopeEnum.GLOBAL)\n return _Run(args, holder, ssl_certificate_ref)", "def add_args(parser):\n parser.add_argument(\n \"--share-encoder-embeddings\",\n action=\"store_true\",\n help=\"share encoder embeddings across languages\",\n )\n parser.add_argument(\n \"--share-decoder-embeddings\",\n action=\"store_true\",\n help=\"share decoder embeddings across languages\",\n )\n parser.add_argument(\n \"--share-encoders\",\n action=\"store_true\",\n 
help=\"share encoders across languages\",\n )\n parser.add_argument(\n \"--share-decoders\",\n action=\"store_true\",\n help=\"share decoders across languages\",\n )", "def Run(self, args):\n holder = base_classes.ComputeApiHolder(self.ReleaseTrack())\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, holder.resources, default_scope=compute_scope.ScopeEnum.GLOBAL)\n\n return _Run(args, holder, ssl_certificate_ref)", "def process_cl_args():\n\n parser = argparse.ArgumentParser(add_help=False)\n parser.add_argument('commands', nargs='*')\n parser.add_argument('--help', '-h', action='store_true')\n parser.add_argument('--version', '-v', action='store_true')\n parser.add_argument('--debug', '-d', action='store_true')\n parser.add_argument('--logging', '-l', action='store_true')\n parser.add_argument('--no-autosize', action='store_true')\n parser.add_argument('--no-preload', action='store_true')\n args = parser.parse_args()\n\n if args.version:\n xprint(get_version_info())\n xprint(\"\")\n sys.exit()\n\n elif args.help:\n for x in helptext():\n xprint(x[2])\n sys.exit()\n\n if args.debug or os.environ.get(\"mpsytdebug\") == \"1\":\n xprint(get_version_info())\n g.debug_mode = True\n g.no_clear_screen = True\n logfile = os.path.join(tempfile.gettempdir(), \"mpsyt.log\")\n logging.basicConfig(level=logging.DEBUG, filename=logfile)\n logging.getLogger(\"pafy\").setLevel(logging.DEBUG)\n\n elif args.logging or os.environ.get(\"mpsytlog\") == \"1\":\n logfile = os.path.join(tempfile.gettempdir(), \"mpsyt.log\")\n logging.basicConfig(level=logging.DEBUG, filename=logfile)\n logging.getLogger(\"pafy\").setLevel(logging.DEBUG)\n\n if args.no_autosize:\n g.detectable_size = False\n\n g.command_line = \"playurl\" in args.commands or \"dlurl\" in args.commands\n if g.command_line:\n g.no_clear_screen = True\n\n if args.no_preload:\n g.preload_disabled = True\n\n g.argument_commands = args.commands", "def main(cls, args=None):\n parser = cls.CreateParser()\n cls.AddCommandLineArgs(parser, None)\n options, extra_args = parser.parse_known_args(args=args)\n cls.ProcessCommandLineArgs(parser, options, extra_args, None)\n return min(cls().Run(options, extra_args), 255)", "def get_cli_arguments(self):\n pass", "def test_with_explicit_sample_args(self):\n test_dict = CliArgs('sample', ['-a', '26', '-s', 'somefile', '-n', '-u', 'foo', '-v']).__dict__\n self.assertEqual('foo', test_dict['user'])\n self.assertEqual(1, test_dict['verbosity'])\n self.assertEqual('26', test_dict['analyzer_profile'])", "def main(args=None):\n print(\"Hello kagglemalware!\")\n return args", "def test_main_required_args(self):\n args = [\n \"--layout\",\n self.layout_single_signed_path,\n \"--layout-keys\",\n self.alice_path,\n ]\n\n self.assert_cli_sys_exit(args, 0)", "def test_main_required_args(self):\n args = [\n \"--layout\",\n self.layout_single_signed_path,\n \"--layout-keys\",\n self.alice_path,\n ]\n\n self.assert_cli_sys_exit(args, 0)", "def _init_global_context(cli_args):\n context.CLIARGS = CLIArgs.from_options(cli_args)", "def _init_global_context(cli_args):\n context.CLIARGS = CLIArgs.from_options(cli_args)", "def checkArguments ( ) :\r\n\r\n if len( sys.argv ) <= 1 : return None\r\n\r\n\r\n # splits the arguments that contain quotes\r\n \r\n wordList = [ ]\r\n\r\n for argument in sys.argv :\r\n\r\n wordList.extend( argument.split( '\"' ) )\r\n\r\n\r\n # places all the arguments that start with \"--\" at the end, and joins the others into words\r\n\r\n noMinusList = [ ]\r\n\r\n minusList = [ 
]\r\n\r\n argument = \"\"\r\n\r\n for word in wordList[ 1 : ] :\r\n\r\n # strips spaces and quotes\r\n \r\n word = word.strip( \" \\\"'\" ) \r\n\r\n if word.startswith( \"--\" ) :\r\n\r\n minusList.append( word )\r\n\r\n if len( argument ) > 0 : noMinusList.append( argument )\r\n\r\n argument = \"\"\r\n\r\n elif argument == \"\" :\r\n\r\n argument = word\r\n\r\n else :\r\n\r\n argument = argument + \" \" + word\r\n\r\n if len( argument ) > 0 : noMinusList.append( argument )\r\n\r\n\r\n # library = 1st argument of the form \"-- ... /\" that exists\r\n\r\n libraryPath = None\r\n\r\n for argument in minusList :\r\n\r\n if ( ( argument.endswith( os.sep ) ) and ( os.path.exists( argument.strip( \"- \" ) ) ) ) :\r\n\r\n libraryPath = argument.strip( \"-\" )\r\n\r\n break\r\n\r\n # recomposes the command line\r\n \r\n sys.argv = wordList[ : 1 ] + noMinusList + minusList \r\n\r\n return libraryPath", "def __init__(self, args:argparse.Namespace):\n\t\tglobal DISTRO\n\n\t\tself.dispersion = args.dispersion if args.dispersion > 0 else 0\n\t\tself.login_dispersion = args.login_dispersion if args.login_dispersion > 0 else 0\n\t\tself.wait_for_parents = bool(args.wait_for_parents)\n\t\tself.retries = args.retries if args.retries > 0 else 0\n\t\tself.rev_proxy_disable = args.rev_proxy_disable\n\t\tself.verify = not args.insecure\n\n\t\tsetLogLevel(args.log_level)\n\n\t\tlogging.info(\"Distribution detected as: '%s'\", DISTRO)\n\n\t\tself.hostname = (platform.node().split('.')[0], platform.node())\n\t\tlogging.info(\"Hostname detected as: '%s'\", self.fullHostname)\n\n\t\ttry:\n\t\t\tself.mode = Configuration.Modes[args.Mode.upper()]\n\t\texcept KeyError as e:\n\t\t\traise ValueError(\"Unrecognized Mode: '%s'\" % args.Mode)\n\n\t\tself.tsroot = parseTSRoot(args.ts_root)\n\t\tlogging.info(\"ATS root installation directory set to '%s'\", self.tsroot)\n\n\t\tself.useSSL, self.toHost, self.toPort = parseTOURL(args.to_url, self.verify)\n\t\tself.username, self.password = args.to_user, args.to_password", "def parse_arguments(args):", "def entry_point():", "def entry_point():", "def entry_point():", "def add_args(self, parser):", "def DistEntry():\n flags.StartMain(main)", "def main():\n \n # Fixed paths to pacvert\n if hasattr(sys, 'frozen'):\n pacvert.FULL_PATH = os.path.abspath(sys.executable)\n else:\n pacvert.FULL_PATH = os.path.abspath(__file__)\n\n pacvert.PROG_DIR = os.path.dirname(pacvert.FULL_PATH)\n pacvert.ARGS = sys.argv[1:]\n\n # From sickbeard\n pacvert.SYS_PLATFORM = sys.platform\n pacvert.SYS_ENCODING = None\n\n try:\n locale.setlocale(locale.LC_ALL, \"\")\n pacvert.SYS_ENCODING = locale.getpreferredencoding()\n except (locale.Error, IOError):\n pass\n\n # for OSes that are poorly configured I'll just force UTF-8\n if not pacvert.SYS_ENCODING or pacvert.SYS_ENCODING in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):\n pacvert.SYS_ENCODING = 'UTF-8'\n \n # Set up and gather command line arguments\n parser = argparse.ArgumentParser(\n description='A Python based conversion tool.')\n\n parser.add_argument(\n '-v', '--verbose', action='store_true', help='Increase console logging verbosity')\n parser.add_argument(\n '-q', '--quiet', action='store_true', help='Turn off console logging')\n parser.add_argument(\n '-d', '--daemon', action='store_true', help='Run as a daemon')\n parser.add_argument(\n '-p', '--port', type=int, help='Force pacvert to run on a specified port')\n parser.add_argument(\n '--dev', action='store_true', help='Start pacvert in the development environment')\n parser.add_argument(\n 
'--datadir', help='Specify a directory where to store your data files')\n parser.add_argument(\n '--config', help='Specify a config file to use')\n parser.add_argument(\n '--nolaunch', action='store_true', help='Prevent browser from launching on startup')\n parser.add_argument(\n '--pidfile', help='Create a pid file (only relevant when running as a daemon)')\n\n args = parser.parse_args()\n\n if args.verbose:\n pacvert.VERBOSE = True\n if args.quiet:\n pacvert.QUIET = True\n \n \n # Do an intial setup of the logger.\n logger.initLogger(console=not pacvert.QUIET, log_dir=False,\n verbose=pacvert.VERBOSE)\n \n if args.dev:\n pacvert.DEV = True\n logger.debug(u\"pacvert is running in the dev environment.\")\n\n if args.daemon:\n if sys.platform == 'win32':\n sys.stderr.write(\n \"Daemonizing not supported under Windows, starting normally\\n\")\n else:\n pacvert.DAEMON = True\n pacvert.QUIET = True\n\n if args.pidfile:\n pacvert.PIDFILE = str(args.pidfile)\n\n # If the pidfile already exists, pacvert may still be running, so\n # exit\n if os.path.exists(pacvert.PIDFILE):\n raise SystemExit(\"PID file '%s' already exists. Exiting.\" %\n pacvert.PIDFILE)\n\n # The pidfile is only useful in daemon mode, make sure we can write the\n # file properly\n if pacvert.DAEMON:\n pacvert.CREATEPID = True\n\n try:\n with open(pacvert.PIDFILE, 'w') as fp:\n fp.write(\"pid\\n\")\n except IOError as e:\n raise SystemExit(\"Unable to write PID file: %s\", e)\n else:\n logger.warn(\"Not running in daemon mode. PID file creation \" \\\n \"disabled.\")\n\n # Determine which data directory and config file to use\n if args.datadir:\n pacvert.DATA_DIR = args.datadir\n else:\n pacvert.DATA_DIR = pacvert.PROG_DIR\n\n if args.config:\n config_file = args.config\n else:\n config_file = os.path.join(pacvert.DATA_DIR, config.FILENAME)\n\n # Try to create the DATA_DIR if it doesn't exist\n if not os.path.exists(pacvert.DATA_DIR):\n try:\n os.makedirs(pacvert.DATA_DIR)\n except OSError:\n raise SystemExit(\n 'Could not create data directory: ' + pacvert.DATA_DIR + '. Exiting....')\n\n # Make sure the DATA_DIR is writeable\n if not os.access(pacvert.DATA_DIR, os.W_OK):\n raise SystemExit(\n 'Cannot write to the data directory: ' + pacvert.DATA_DIR + '. Exiting...')\n\n # Put the database in the DATA_DIR\n #pacvert.DB_FILE = os.path.join(pacvert.DATA_DIR, database.FILENAME)\n\n if pacvert.DAEMON:\n pacvert.daemonize()\n\n # Read config and start logging\n pacvert.initialize(config_file)\n\n # Start the background threads\n pacvert.start()\n\n try:\n queue_worker.start_thread()\n except:\n logger.error(u\"Main thread did exit. Wtf.\")\n\n # Force the http port if neccessary\n if args.port:\n http_port = args.port\n logger.info('Using forced web server port: %i', http_port)\n else:\n http_port = int(pacvert.CONFIG.HTTP_PORT)\n\n # Check if pyOpenSSL is installed. It is required for certificate generation\n # and for CherryPy.\n if pacvert.CONFIG.ENABLE_HTTPS:\n try:\n import OpenSSL\n except ImportError:\n logger.warn(\"The pyOpenSSL module is missing. Install this \" \\\n \"module to enable HTTPS. HTTPS will be disabled.\")\n pacvert.CONFIG.ENABLE_HTTPS = False\n\n # Try to start the server. 
Will exit here is address is already in use.\n web_config = {\n 'http_port': http_port,\n 'http_host': pacvert.CONFIG.HTTP_HOST,\n 'http_root': pacvert.CONFIG.HTTP_ROOT,\n 'http_environment': pacvert.CONFIG.HTTP_ENVIRONMENT,\n 'http_proxy': pacvert.CONFIG.HTTP_PROXY,\n 'enable_https': pacvert.CONFIG.ENABLE_HTTPS,\n 'https_cert': pacvert.CONFIG.HTTPS_CERT,\n 'https_key': pacvert.CONFIG.HTTPS_KEY,\n 'http_username': pacvert.CONFIG.HTTP_USERNAME,\n 'http_password': pacvert.CONFIG.HTTP_PASSWORD,\n 'http_basic_auth': pacvert.CONFIG.HTTP_BASIC_AUTH\n }\n\n webstart.initialize(web_config)\n\n # Wait endlessy for a signal to happen\n while True:\n if not pacvert.SIGNAL:\n try:\n time.sleep(1)\n except KeyboardInterrupt:\n pacvert.SIGNAL = 'shutdown'\n else:\n logger.info('Received signal: %s', pacvert.SIGNAL)\n\n if pacvert.SIGNAL == 'shutdown':\n pacvert.shutdown()\n elif pacvert.SIGNAL == 'restart':\n pacvert.shutdown(restart=True)\n else:\n pacvert.shutdown(restart=True, update=True)\n\n pacvert.SIGNAL = None", "def main(cmd_line_args):\n if cmd_line_args.clean:\n clean_wrapper(cmd_line_args)\n if cmd_line_args.nobuild == False:\n build_wrapper(cmd_line_args)", "def ReadArguments():\n\n args = ParseArguments()\n\n logging.info('Command line arguments...')\n for arg in vars(args):\n logging.info(str(arg) + ': ' + str(getattr(args, arg)))\n logging.info('')\n\n IsTest(args)\n ProcessCacheSize(args)\n ProcessLineSize(args)\n ProcessMulti(args)\n ProcessMemPattern(args)\n ProcessMemFile(args)", "def _create_argument_parser():\n\n parser = argparse.ArgumentParser(\n description=\"Execute a CPAchecker run in the VerifierCloud using the web interface.\"\n + \" Command-line parameters can additionally be read from a file if file name prefixed with '@' is given as argument.\",\n fromfile_prefix_chars=\"@\",\n add_help=False, # conflicts with -heap\n )\n\n parser.add_argument(\"-h\", \"--help\", action=\"help\", help=\"Prints this help.\")\n\n parser.add_argument(\n \"--cloudMaster\",\n dest=\"cloud_master\",\n default=\"https://vcloud.sosy-lab.org/cpachecker/webclient/\",\n metavar=\"HOST\",\n help=\"Sets the webclient host of the VerifierCloud instance to be used.\",\n )\n\n parser.add_argument(\n \"--cloudPriority\",\n dest=\"cloud_priority\",\n metavar=\"PRIORITY\",\n help=\"Sets the priority for this benchmark used in the VerifierCloud. Possible values are IDLE, LOW, HIGH, URGENT.\",\n )\n\n parser.add_argument(\n \"--cloudCPUModel\",\n dest=\"cpu_model\",\n type=str,\n default=None,\n metavar=\"CPU_MODEL\",\n help=\"Only execute runs in the VerifierCloud on CPU models that contain the given string.\",\n )\n\n parser.add_argument(\n \"--cloudUser\",\n dest=\"cloud_user\",\n metavar=\"USER:PWD\",\n help=\"The user and password for the VerifierCloud.\",\n )\n\n parser.add_argument(\n \"--revision\",\n dest=\"revision\",\n metavar=\"BRANCH:REVISION\",\n help=\"The svn revision of CPAchecker to use.\",\n )\n\n parser.add_argument(\n \"-d\", \"--debug\", action=\"store_true\", help=\"Enable debug output\"\n )\n\n parser.add_argument(\n \"-o\",\n \"--outputpath\",\n dest=\"output_path\",\n type=str,\n default=DEFAULT_OUTPUT_PATH,\n help=\"Output prefix for the generated results. 
\"\n + \"If the path is a folder files are put into it,\"\n + \"otherwise it is used as a prefix for the resulting files.\",\n )\n parser.add_argument(\n \"--resultFilePattern\",\n dest=\"result_file_pattern\",\n type=str,\n default=\"**\",\n help=\"Only files matching this glob pattern are transported back to the client.\",\n )\n\n parser.add_argument(\n \"-T\",\n \"--timelimit\",\n dest=\"timelimit\",\n default=None,\n type=util.parse_timespan_value,\n help=\"Time limit in seconds\",\n metavar=\"SECONDS\",\n )\n\n parser.add_argument(\n \"-M\",\n \"--memorylimit\",\n dest=\"memorylimit\",\n default=None,\n type=util.parse_memory_value,\n help=\"Memory limit\",\n metavar=\"BYTES\",\n )\n\n parser.add_argument(\n \"-c\",\n \"--corelimit\",\n dest=\"corelimit\",\n type=int,\n default=None,\n metavar=\"N\",\n help=\"Limit the tool to N CPU cores.\",\n )\n\n parser.add_argument(\n \"--version\", action=\"version\", version=\"%(prog)s \" + __version__\n )\n return parser", "def add_cmdline_args(cls, argparser):\n agent = argparser.add_argument_group('Safe Local Human Arguments')\n agent.add_argument(\n '--safety',\n type=str,\n default='all',\n choices={'none', 'string_matcher', 'classifier', 'all'},\n help='Apply safety filtering to messages',\n )\n super(SafeLocalHumanAgent, cls).add_cmdline_args(argparser)", "def get_args_from_console(args):\n return {\n \"cleaning_policy\": args.cleaning_policy,\n \"clear\": args.clear,\n \"content\": args.content,\n \"dry_run\": args.dry_run,\n \"force\": args.force,\n \"in_lines\": args.in_lines,\n \"max_size\": args.max_size,\n \"regex\": args.regex,\n \"restore\": args.restore,\n \"rmdir\": args.rmdir,\n \"short\": args.short,\n \"silent\": args.silent,\n \"storage_time\": args.storage_time,\n \"wastebasket_path\": args.wastebasket_path\n }", "def setup_cmdline():\n config_file = Script.fullname + '.ini'\n if modUtils.linux():\n log_folder = '/var/log'\n elif modUtils.windows():\n log_folder = 'c:/Temp'\n else:\n log_folder = '.'\n\n parser = argparse.ArgumentParser(\n description='Cooling fan manager and MQTT client, version '\n + __version__\n )\n # Position arguments\n parser.add_argument(\n 'config',\n type=argparse.FileType('r'),\n nargs='?',\n default=config_file,\n help='Configuration INI file, default: ' + config_file\n )\n # Options\n parser.add_argument(\n '-V', '--version',\n action='version',\n version=__version__,\n help='Current version of the script.'\n )\n parser.add_argument(\n '-v', '--verbose',\n choices=['debug', 'info', 'warning', 'error', 'critical'],\n default='debug',\n help='Level of logging to the console.'\n )\n parser.add_argument(\n '-l', '--loglevel',\n choices=['debug', 'info', 'warning', 'error', 'critical'],\n default='debug',\n help='Level of logging to a log file.'\n )\n parser.add_argument(\n '-d', '--logdir',\n default=log_folder,\n help='Folder of a log file, default ' + log_folder\n )\n parser.add_argument(\n '-c', '--configuration',\n action='store_true',\n help=\"\"\"Print configuration parameters in form of INI file content.\"\"\"\n )\n # Process command line arguments\n global cmdline\n cmdline = parser.parse_args()", "def command_line_start(argv, program_name):\n cl_parser = argparse.ArgumentParser(description='Tinkerforge Data Logger')\n\n cl_parser.add_argument('config_file', help=\"Path to the configuration file\")\n cl_parser.add_argument('-v', action=\"store_true\", dest=\"validate\",\n help=\"Just process the validation of the configuration file\")\n\n results = cl_parser.parse_args(argv)\n\n 
arguments_map = {}\n arguments_map[CONSOLE_CONFIG_FILE] = results.config_file\n arguments_map[CONSOLE_VALIDATE_ONLY] = results.validate\n\n return arguments_map", "def parse_args():\n parser = argparse.ArgumentParser(\n description=\"pop-nedry Win64 shellcode build script\"\n )\n\n parser.add_argument(\n '--url', type=str, required=True,\n help='URL for web page hosting the Nedry GIF'\n )\n\n return parser.parse_args()", "def parse_command_line_args():\n parser = argparse.ArgumentParser(description=(\n 'HYAKUYOBAKO Data sender.'))\n parser.add_argument(\n '--project_id', required=True, help='GCP cloud project name')\n parser.add_argument(\n '--registry_id', required=True, help='Cloud IoT Core registry id')\n parser.add_argument(\n '--device_id', required=True, help='Cloud IoT Core device id')\n parser.add_argument(\n '--private_key_file',\n required=True,\n help='Path to private key file.')\n parser.add_argument(\n '--algorithm',\n choices=('RS256', 'ES256'),\n required=True,\n help='The encryption algorithm to use to generate the JWT.')\n parser.add_argument(\n '--cloud_region', default='us-central1', help='GCP cloud region')\n parser.add_argument(\n '--ca_certs',\n default='roots.pem',\n help=('CA root from https://pki.google.com/roots.pem'))\n parser.add_argument(\n '--message_type',\n choices=('event', 'state'),\n default='event',\n required=True,\n help=('Indicates whether the message to be published is a '\n 'telemetry event or a device state message.'))\n parser.add_argument(\n '--base_url',\n default=_BASE_URL,\n help=('Base URL for the Cloud IoT Core Device Service API'))\n parser.add_argument(\n '--jwt_expires_minutes',\n default=20,\n type=int,\n help=('Expiration time, in minutes, for JWT tokens.'))\n parser.add_argument(\n '--id',\n default=999,\n type=int,\n help=('Device id, not IoT Core device id for unique key.'))\n parser.add_argument(\n '--location_logitude',\n default=0.0,\n type=float,\n help=('Logitude of this deice. ex)35.658581'))\n parser.add_argument(\n '--location_latitude',\n default=0.0,\n type=float,\n help=('Latitude of this deice. 
ex)139.745433'))\n\n return parser.parse_args()", "def getCommandLineArgs():\n parser = argparse.ArgumentParser(prog=\"ccvalidator\",\n description=\"Validate and determine the issuer of a given credit card number\")\n parser.add_argument(\"card_num\", help=\"Credit card number\")\n\n return parser.parse_args()", "def cmd_stru(args):", "def __init__(self, commandline_args):\n self.commit = commandline_args['commit']\n self.registry = commandline_args['registry']\n self.version = commandline_args['xl_version']\n self.image_version = image_version(commandline_args['xl_version'], commandline_args['suffix'])", "def __main__(*args):\n config = parse_args(args)\n validate_config(config)\n apply_registration_settings(config)", "def main():\n args = parseCommandLineArguments()\n\n\n chemkin1 = args.chemkin1[0]\n speciesDict1 = args.speciesDict1[0]\n if args.thermo1: \n thermo1 = args.thermo1[0]\n else:\n thermo1 = None\n chemkin2 = args.chemkin2[0]\n speciesDict2 = args.speciesDict2[0]\n if args.thermo2: \n thermo2 = args.thermo2[0]\n else:\n thermo2 = None\n\n kwargs = {\n 'web': args.web,\n 'wd': os.getcwd()\n }\n\n execute(chemkin1, speciesDict1, thermo1, chemkin2, speciesDict2, thermo2, **kwargs)", "def GetArgs():\n \n UserArgs = {}\n UserArgs['help'] = False\n UserArgs['RsodFileName'] = \"\"\n UserArgs['BiosPathX64'] = \"\"\n\n for i in range(1,len(sys.argv)):\n if sys.argv[i].lower() == \"-help\" : UserArgs[\"help\"] = True\n elif sys.argv[i].lower() == \"-h\" : UserArgs[\"help\"] = True\n elif \"-rsodfile=\" in sys.argv[i].lower() : UserArgs['RsodFileName'] = sys.argv[i].split ('=', 1)[1]\n elif \"-biospathx64=\" in sys.argv[i].lower() : UserArgs['BiosPathX64'] = sys.argv[i].split ('=', 1)[1]\n\n return UserArgs", "def entry_point():\n\n\n plac.call(main)", "def initialize(self, args):\n\t\tpass", "def load(args):\n subprocess.check_call([\"/bin/launchctl\", \"load\"] + values.get(args))", "def command_line_args(parser):\n AbyssAssembler.command_line_args(parser)\n SpadesAssembler.command_line_args(parser)\n TrinityAssembler.command_line_args(parser)\n VelvetAssembler.command_line_args(parser)", "def handle_cmdline_args():\n\n parser = argparse.ArgumentParser(\n description='Generate synthetic data from a specification in a json '\n 'file using the \"synth-method\" described in the json file. ')\n\n parser.add_argument(\n '-i', dest='infile', required=True,\n help='The input json file. Must contain a \"synth-method\" property')\n\n parser.add_argument(\n '-o', dest='outfile_prefix', required=True, help='The prefix of the output paths (data json and csv), relative to the QUIPP-pipeline root directory')\n\n args = parser.parse_args()\n return args", "def main():\n # n.b. 
apps frozen with python3.8 get this far when\n # double clicked (CLI opening is ok)\n\n parser = ArgumentParser(prog='NottReal')\n parser.add_argument(\n '-l',\n '--log',\n choices={'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'},\n default='INFO',\n help='Minimum level of log output.')\n parser.add_argument(\n '-c',\n '--config_dir',\n default='cfg.nrc',\n type=ArgparseUtils.dir_contains_config,\n help='Directory containing the configuration files')\n parser.add_argument(\n '-d',\n '--output_dir',\n default=None,\n type=ArgparseUtils.dir_is_writeable,\n help='Directory to dump logs from spoken text (disabled by default)')\n parser.add_argument(\n '-r',\n '--recognition',\n default=None,\n help='Speech-to-text recognition system to use')\n parser.add_argument(\n '-v',\n '--voice',\n default=None,\n help='Voice synthesis library to use')\n parser.add_argument(\n '-o',\n '--output_win',\n default='disabled',\n help='Show an output window on opening')\n parser.add_argument(\n '-ns',\n '--nostate',\n default=False,\n action='store_true',\n help='Disable automatic state saving in config directory')\n parser.add_argument(\n '-dev',\n '--dev',\n action='store_true',\n help='Enable developer mode/disable catching of errors')\n args = parser.parse_args()\n\n Logger.init(getattr(Logger, args.log))\n Logger.info(__name__, \"Hello, World\")\n Logger.info(__name__, str(sys.argv))\n\n ArgparseUtils.init_darwin()\n\n App(args)\n\n Logger.info(__name__, \"Goodbye, World\")\n sys.exit(0)", "def initialize(args):\n # save the data directory in ribfrac/environ.py\n data_dir = args.data_dir\n with open(\"ribfrac/environ.py\", \"w\") as f:\n f.write(f'DATA_DIR = \"{args.data_dir}\"')\n\n # create data_dir if it doesn't exist\n if not os.path.exists(data_dir):\n print(f\"Data directory {data_dir} doesn't exist and is automatically\"\n \" created.\")\n os.mkdir(data_dir)", "def process_command_line():\n\n # Add the command lien arguments\n parser = argparse.ArgumentParser(description=\"test autocontext\", formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n\n # Arguments\n parser.add_argument(\"--ilastik\", type=str, required=True,\n help=\"path to the file run_ilastik.sh\")\n\n parser.add_argument(\"--train\", type=str,\n help=\"path to the ilastik project that will be used for training\")\n\n parser.add_argument(\"--batch_predict\", type=str,\n help=\"path of the cache folder of a previously trained autocontext that will be used for batch \"\n \"prediction\")", "def _Args(parser,\n include_l7_internal_load_balancing=False,\n support_managed_certs=False):\n parser.add_argument(\n '--description',\n help='An optional, textual description for the SSL certificate.')\n\n parser.display_info.AddCacheUpdater(\n flags.SslCertificatesCompleterBeta\n if include_l7_internal_load_balancing else flags.SslCertificatesCompleter)\n\n if support_managed_certs:\n managed_or_not = parser.add_group(\n mutex=True,\n required=True,\n help='Flags for managed or self-managed certificate. ')\n\n managed_or_not.add_argument(\n '--domains',\n metavar='DOMAIN',\n type=arg_parsers.ArgList(min_length=1),\n default=[],\n help=\"\"\"\\\n List of domains to create a managed certificate for.\n \"\"\")\n\n not_managed = managed_or_not.add_group('Flags for self-managed certificate')\n not_managed.add_argument(\n '--certificate',\n metavar='LOCAL_FILE_PATH',\n required=True,\n help=\"\"\"\\\n Path to a local certificate file to create a self-managed\n certificate. The certificate must be in PEM format. 
The certificate\n chain must be no greater than 5 certs long. The chain must include at\n least one intermediate cert.\n \"\"\")\n not_managed.add_argument(\n '--private-key',\n metavar='LOCAL_FILE_PATH',\n required=True,\n help=\"\"\"\\\n Path to a local private key file. The private key must be in PEM\n format and must use RSA or ECDSA encryption.\n \"\"\")\n else:\n parser.add_argument(\n '--certificate',\n required=True,\n metavar='LOCAL_FILE_PATH',\n help=\"\"\"\\\n Path to a local certificate file. The certificate must be in PEM\n format. The certificate chain must be no greater than 5 certs long. The\n chain must include at least one intermediate cert.\n \"\"\")\n\n parser.add_argument(\n '--private-key',\n required=True,\n metavar='LOCAL_FILE_PATH',\n help=\"\"\"\\\n Path to a local private key file. The private key must be in PEM\n format and must use RSA or ECDSA encryption.\n \"\"\")", "def base_init(new_args):\n global args\n args = new_args\n # UTF-8 support\n if sys.version_info < (3, 0):\n sys.stderr = codecs.getwriter('UTF-8')(sys.stderr)\n sys.stdout = codecs.getwriter('UTF-8')(sys.stdout)\n sys.stdin = codecs.getreader('UTF-8')(sys.stdin)\n logging.warn(\"SGNMT is tested with Python 3, but you are using \"\n \"Python 2. Expect the unexpected or switch to >3.5.\")\n # Set up logger\n logger = logging.getLogger(__name__)\n logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s')\n logging.getLogger().setLevel(logging.INFO)\n if args.verbosity == 'debug':\n logging.getLogger().setLevel(logging.DEBUG)\n elif args.verbosity == 'info':\n logging.getLogger().setLevel(logging.INFO)\n elif args.verbosity == 'warn':\n logging.getLogger().setLevel(logging.WARN)\n elif args.verbosity == 'error':\n logging.getLogger().setLevel(logging.ERROR)\n # Set reserved word IDs\n utils.switch_to_fairseq_indexing()\n \n ui.validate_args(args)\n if args.run_diagnostics:\n ui.run_diagnostics()\n sys.exit()", "def _start(args=None):\n options = _parse_args(args)\n main(**options)", "def validate_args(args):\n setup_logging(args.verbose)\n log.debug('Raw arguments:\\n{}'.format(args))\n\n # Check if pipeline file exists\n args.pipeline = Path(args.pipeline)\n\n if not args.pipeline.is_file():\n log.error('No such file {}'.format(args.pipeline))\n exit(1)\n\n args.pipeline = args.pipeline.resolve()\n\n return args", "def _prepare(self):\n # Customize commandline arguments\n parser = argparse.ArgumentParser()\n self.initArgumentParser(parser, defaults=self.default_binding_overrides)\n self.__options = parser.parse_args()\n self.__bindings.update(args_util.parser_args_to_bindings(self.__options))\n\n self.start_logging()", "def initialize():\n\n global cmdarg\n # Open syslog for error message tracking\n syslog.openlog(\"munin-chrony\", 0, syslog.LOG_DAEMON)\n\n # Try to get the command-line argument, if there is one (usually either\n # 'config' or nothing)\n try:\n cmdarg = sys.argv[1]\n except IndexError:\n # It's not actually an error if this is out of range -- it just means\n # there wasn't an argument, so don't run in config mode\n cmdarg = \"\"", "def build_pyinstaller_arguments(\n python_executable_path, starcraft2_path, python_sc2_path, run_file_path, output_folder_path, ecryption_key_path\n) -> list:\n arguments = []\n\n arguments.append(python_executable_path)\n\n # Disable asserts, does not seem to work anymore?\n # arguments.append(\"-OO\")\n\n arguments.append(\"-m\")\n arguments.append(\"PyInstaller\")\n\n arguments.append(\"--onefile\")\n\n sc2_dlls = [\"icuin52.dll\", 
\"icuuc52.dll\", \"icudt52.dll\"]\n for sc2_dll in sc2_dlls:\n dll_path = os.path.join(starcraft2_path, sc2_dll)\n arguments.append(\"--add-binary\")\n arguments.append(f\"{dll_path};.\")\n\n arguments.append(\"--add-data\")\n arguments.append(f\"{python_sc2_path};sc2\")\n\n # Add scipy dlls\n print(f\"Scipy folder: {scipy_dlls_path.absolute()}\")\n for dll_path in scipy_dlls_path.iterdir():\n arguments.append(\"--add-binary\")\n arguments.append(f\"{dll_path};.\")\n\n # Add paths that may be encrypted during the packing process\n arguments.append(\"--paths\")\n arguments.append(f\"{bot_folder_path}\")\n\n arguments.append(\"--clean\")\n arguments.append(\"--noconfirm\")\n\n # Encryption using the --key=mykey pyinstaller parameter. Checks if pycrypto is installed\n try:\n import pycrypt\n\n encryption_key = \"\"\n if os.path.isfile(ecryption_key_path):\n with open(ecryption_key_path) as f:\n # Remove possibly unwanted newline, tab or space characters, as they also might be invalid characters\n encryption_key = f.read().strip(\" \\n\\t\")\n if encryption_key != \"\":\n arguments.append(\"--key\")\n arguments.append(encryption_key)\n except ImportError:\n print(\"Pycrypto not detected. Skipping encryption\")\n\n arguments.append(\"--distpath\")\n arguments.append(f\"{output_folder_path}\")\n\n arguments.append(f\"{run_file_path}\")\n\n assert all(isinstance(i, str) for i in arguments)\n\n print(\"Pyinstaller Arguments: {}\".format(arguments))\n print(\"Exact command line call: {}\".format(\" \".join(arguments)))\n return arguments", "def main():\n\n # Fix crackling audio\n util.set_environment('PULSE_LATENCY_MSEC', '60')\n\n # Replace launcher with game exe in proton arguments\n util.replace_command('FF9_Launcher.exe', 'x64/FF9.exe')", "def main_argv():\n main_parse_args(sys.argv[1:])", "def main_argv():\n main_parse_args(sys.argv[1:])", "def main():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"URI\")\n args = parser.parse_args()\n smart_client(args.URI)", "def add_args(parser):\r\n parser.add_argument(\"data\", help=\"path to data directory\")\r\n parser.add_argument(\r\n \"--silence-token\", default=\"\\u2581\", help=\"token for silence (used by w2l)\"\r\n )\r\n parser.add_argument(\r\n \"--max-source-positions\",\r\n default=sys.maxsize,\r\n type=int,\r\n metavar=\"N\",\r\n help=\"max number of frames in the source sequence\",\r\n )\r\n parser.add_argument(\r\n \"--max-target-positions\",\r\n default=1024,\r\n type=int,\r\n metavar=\"N\",\r\n help=\"max number of tokens in the target sequence\",\r\n )", "def add_arguments(cls, parser):\n super(ShellScript, cls).add_arguments(parser)\n\n parser.add_argument(\n '-u', '--username', type=str, help='Clarity LIMS username', required=True\n )\n\n parser.add_argument(\n '-p', '--password', type=str, help='Clarity LIMS password', required=True\n )\n\n parser.add_argument(\n '-r', '--lims-root-uri', type=str, help='URI of Clarity LIMS (ending in /api/v2/)', required=True\n )\n\n parser.add_argument(\n '--insecure', action='store_true', help='Disables SSL Certificate validation.', required=False,\n )", "def ParseCommandArguments(args):\n\n\n\n import argparse\n from google.appengine.tools import boolean_action\n\n parser = argparse.ArgumentParser()\n parser.add_argument('-A', '--application', required=True)\n parser.add_argument('--api_host', default='')\n\n parser.add_argument('--api_port', default=8000, type=int)\n parser.add_argument('--trusted',\n action=boolean_action.BooleanAction,\n const=True,\n default=False)\n 
parser.add_argument('--application_root', default=None)\n parser.add_argument('--application_host', default='localhost')\n parser.add_argument('--application_port', default=None)\n\n\n parser.add_argument('--blobstore_path', default=None)\n\n\n parser.add_argument('--datastore_path', default=None)\n\n parser.add_argument('--auto_id_policy', default='scattered',\n type=lambda s: s.lower(),\n choices=(datastore_stub_util.SEQUENTIAL,\n datastore_stub_util.SCATTERED))\n\n parser.add_argument('--use_sqlite',\n action=boolean_action.BooleanAction,\n const=True,\n default=False)\n parser.add_argument('--high_replication',\n action=boolean_action.BooleanAction,\n const=True,\n default=False)\n parser.add_argument('--require_indexes',\n action=boolean_action.BooleanAction,\n const=True,\n default=False)\n parser.add_argument('--clear_datastore',\n action=boolean_action.BooleanAction,\n const=True,\n default=False)\n\n\n parser.add_argument('--logs_path', default=None)\n\n\n parser.add_argument('--enable_sendmail',\n action=boolean_action.BooleanAction,\n const=True,\n default=False)\n parser.add_argument('--smtp_host', default='')\n\n parser.add_argument('--smtp_port', default=25, type=int)\n parser.add_argument('--smtp_user', default='')\n parser.add_argument('--smtp_password', default='')\n parser.add_argument('--show_mail_body',\n action=boolean_action.BooleanAction,\n const=True,\n default=False)\n\n\n parser.add_argument('--prospective_search_path', default=None)\n parser.add_argument('--clear_prospective_search',\n action=boolean_action.BooleanAction,\n const=True,\n default=False)\n\n\n parser.add_argument('--enable_task_running',\n action=boolean_action.BooleanAction,\n const=True,\n default=True)\n\n parser.add_argument('--task_retry_seconds', default=30, type=int)\n\n\n parser.add_argument('--user_login_url', default=None)\n parser.add_argument('--user_logout_url', default=None)\n\n return parser.parse_args(args)", "def add_cli_args(parser):\n parser.add_argument(\n '--raw_path',\n help='Source path where audio data files are stored',\n default=RAW_DATA_PATH\n )\n parser.add_argument(\n '--features_path',\n help='Output path where exported data will be placed',\n default=FEATURES_DATA_PATH\n )\n parser.add_argument(\n '--feature',\n help='name of the feature to be extracted (options: mfsc, leglaive)',\n default=VoiceActivationFrameSelectionFeatureExtractor.feature_name\n )", "def build_cmdline():\n\tcmd=optparse.OptionParser(version=__version__)\n\tcmd.add_option('-c', '', dest='config_fname',type=\"string\", help='WHM/WHMCS configuration file', metavar=\"FILE\")\n\tcmd.add_option('-s', '', dest=\"whm_section\", type=\"string\", help=\"WHM server to use. Specify section name. 
eg: -s ds01\", metavar=\"SERVER\")\n\tcmd.add_option('','--search', action=\"store\", dest='search', type=\"string\", help=\"Search client by DNS domain name or cPanel username\", metavar=\"STRING\")\n\tcmd.add_option('-d', '', dest='whmcs_deptid', type=\"int\", help=\"WHMCS Department ID\", metavar=\"INT\") \n\tcmd.add_option('-m', '', dest='whmcs_ticketmsg_fname', type=\"string\", help=\"WHMCS abuse ticket template file\", metavar='FILE')\n\tcmd.add_option('-r', '', dest='whm_suspendmsg_fname', type=\"string\", help='cPanel account suspension reason template file', metavar='FILE')\n\tcmd.add_option('-f', '', dest='whmcs_proofmsg_fname', type=\"string\", help='Abuse proof file which will be appended to abuse ticket message', metavar='FILE')\n\tcmd.add_option('', '--subject', dest='whmcs_subject', type=\"string\", help='Specify abuse ticket subject title.', metavar=\"STRING\")\n\tcmd.add_option('-y', '--allyes', dest='allyes', action=\"store_true\", default=False, help='Assume yes as an answer to any question which would be asked')\n\treturn cmd", "def options():\n\n parser = argparse.ArgumentParser(description=\"PlantCV Clowder image analysis script for the DDPSC indoor system.\",\n formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n #parser.add_argument(\"-v\", \"--vis\", help=\"Input VIS/RGB image.\", required=True)\n #parser.add_argument(\"-n\", \"--nir\", help=\"Input NIR image.\", required=True)\n #parser.add_argument(\"-p\", \"--perspective\", help=\"Camera perspective (side-view, top-view)\", required=True)\n parser.add_argument(\"-d\", \"--dataset\", help=\"Clowder Dataset key.\", required=True)\n parser.add_argument(\"-u\", \"--url\", help=\"Clowder URL.\", required=True)\n parser.add_argument(\"-U\", \"--username\", help=\"Clowder username.\", required=True)\n parser.add_argument(\"-p\", \"--password\", help=\"Clowder password.\", required=True)\n\n args = parser.parse_args()\n\n # if not os.path.exists(args.vis):\n # raise IOError(\"File does not exist: {0}\".format(args.vis))\n # if not os.path.exists(args.nir):\n # raise IOError(\"File does not exist: {0}\".format(args.nir))\n\n return args", "def init(args: Optional[List[bytes]] = None) -> None:\n warnings.warn(_deprecation_warning(), FutureWarning)\n parsed = {}\n if args:\n for arg in args:\n kv = arg.decode().split('=')\n if len(kv) == 2:\n parsed[kv[0]] = kv[1]\n collective.init(**parsed)", "def cli(*args, **kwargs):\n logger.debug('Global options: %s %s', args, kwargs)", "def func_update_arguments(self, arg_raw ):\n\n arg_raw.prog = \"DISCASM\"\n arg_raw.description = \"extracts genome-aligned discordant and unmapped reads, and de novo assembles them\"\n\n arg_raw.add_argument(\"--chimeric_junctions\", dest=\"chimeric_junctions\", required=True, help=\"STAR Chimeric.out.junction file\")\n arg_raw.add_argument(\"--aligned_bam\", dest=\"aligned_bam_filename\", required=False, help=\"aligned bam file from your favorite rna-seq alignment tool\")\n arg_raw.add_argument(\"--left_fq\", dest=\"left_fq_filename\", required=True, help=\"left fastq file\")\n arg_raw.add_argument(\"--right_fq\", dest=\"right_fq_filename\", required=True, help=\"right fastq file\")\n arg_raw.add_argument(\"--out_dir\", dest=\"str_out_dir\", required=True, help=\"output directory\")\n arg_raw.add_argument(\"--denovo_assembler\", dest=\"denovo_assembler\", required=True, help=\"de novo assembly method: Trinity|Oases|OasesMultiK\")\n arg_raw.add_argument(\"--add_trinity_params\", dest=\"add_trinity_params\", required=False, help=\"any additional 
parameters to pass on to Trinity if Trinity is the chosen assembler.\")\n arg_raw.add_argument(\"--normalize_reads\", default=False, action='store_true', help='perform in silico normalization prior to de novo assembly')\n\n return(arg_raw)", "def test_main_required_args(self):\n args = [\"in_toto_keygen.py\"]\n\n with patch.object(sys, 'argv', args + [\"bob\"]), \\\n self.assertRaises(SystemExit):\n in_toto_keygen_main()", "def parse_args(args=None):\n\n\tparser = argparse.ArgumentParser(description=\"A simple python based static generator.\")\n\tparser.add_argument(\"--init\", action=\"store_true\", help=\"Initialize project.\")\n\tparser.add_argument(\"--gen\", action=\"store_true\", help=\"Generate static.\")\n\tparser.add_argument(\"--collect-static\", action=\"store_true\", help=\"Collect static.\")\n\n\tif args:\n\t\treturn parser.parse_args(args)\n\treturn parser.parse_args()", "def command_line (argsv = None) :\n from _TFL.Command_Line import Command_Line\n return Command_Line \\\n ( arg_spec = ( \"src_file:P\", )\n , option_spec =\n ( \"skip_docstrings:B?Do not mark docstrings for translation\"\n , \"translation_markup:S,=_,_T,_Tn,_Tl,_Tln\"\n )\n , help_on_err = True\n )", "def parse_args():\n # Define what commandline arguments can be accepted\n parser = argparse.ArgumentParser()\n parser.add_argument(Flags.CSV_DIR,metavar=\"CSV_DIRECTORY\", type=check_str_is_dir,\n help=\"Source directory containing Digikey CSV files\")\n parser.add_argument(Flags.PDF_DIR,metavar=\"PDF_DIRECTORY\", type=check_str_is_dir,\n help=\"Directory to save the PDF datasheets to\")\n parser.add_argument('--csv_pages', dest=Flags.CSV_PAGES,metavar=\"NUM_PAGES\", type=int, default=1,\n help=\"How many 500-row pages to download from Digikey (default 1)\")\n parser.add_argument('--fv_code', dest=Flags.FV_CODE,metavar=\"FV_CODE\", default='ffe002af', #op-amp\n help=\"The FV code of the part family on Digikey (default op-amps)\")\n parser.add_argument('--encrypted', dest=Flags.KEEP_ENCRYPTED, action='store_true', default=False, help=\"Do not filter encrypted PDFs\")\n parser.add_argument('--skip_csv', dest=Flags.SKIP_CSV_DL, action='store_true', default=False, help=\"Do not redownload the CSV.\")\n parser.add_argument('--skip_pdf', dest=Flags.SKIP_PDF_DL, action='store_true', default=False, help=\"Do not redownload the PDFs.\")\n parser.add_argument('--ocr', dest=Flags.KEEP_OCR, action='store_true', default=False, help=\"Do not filter PDFs that need OCR\")\n parser.add_argument('--duplicates', dest=Flags.KEEP_DUPLICATES, action='store_true', default=False, help=\"Do not filter duplicate PDFs (NOT IMPLEMENTED)\")\n parser.add_argument('--version', action='version', version='%(prog)s 0.0.0')\n args = vars(parser.parse_args())\n\n # TODO (lwhsiao): We should also add option to automatically select a parameterized\n # number of files and organize as train/test/dev\n\n Flags.parsed_args = args\n return args", "def main(argv: List[str]) -> None:\n parser = utils.ArgumentParser()\n parser.add_ecore_root_argument(\"-r\", \"--root\")\n\n parser.parse_args(argv[1:])\n generate_yaml()", "def GetArgs():\n\n parser = argparse.ArgumentParser(description='Process args for connecting to vCenter')\n parser.add_argument('-v', '--vc', required=True, action='store', help='vCenter')\n parser.add_argument('-u', '--user', required=True, action='store', help='vCenter Administrator')\n parser.add_argument('-p', '--password', required=False, action='store', help='Password')\n args = parser.parse_args()\n return args" ]
[ "0.60164946", "0.5917256", "0.5911841", "0.5911841", "0.5905188", "0.5905188", "0.58483124", "0.58254886", "0.570544", "0.5692321", "0.5691585", "0.56263906", "0.5617899", "0.5611776", "0.56115735", "0.55883586", "0.55690145", "0.55671674", "0.5558053", "0.54855394", "0.54752517", "0.5452282", "0.5448891", "0.5407667", "0.54016346", "0.53753513", "0.5324714", "0.5318539", "0.5313541", "0.5307652", "0.53042394", "0.5288293", "0.5261908", "0.5259102", "0.5247603", "0.5245464", "0.5237439", "0.5234123", "0.523212", "0.520901", "0.520901", "0.52087545", "0.52087545", "0.518427", "0.51819414", "0.5177735", "0.51661694", "0.51661694", "0.51661694", "0.51615196", "0.5151427", "0.51420975", "0.5141787", "0.5138453", "0.5135433", "0.5133895", "0.5130438", "0.51159775", "0.5111341", "0.5108273", "0.5103335", "0.5102683", "0.51018906", "0.5093983", "0.50938594", "0.508722", "0.50818646", "0.5077056", "0.50751877", "0.5063804", "0.5062938", "0.5062124", "0.5050465", "0.5046301", "0.5044797", "0.5034636", "0.5034562", "0.50245124", "0.5020434", "0.50117004", "0.5006794", "0.5003628", "0.50027484", "0.5001195", "0.5001195", "0.49988812", "0.4996739", "0.4991542", "0.49885365", "0.4987915", "0.49877346", "0.49860245", "0.49846947", "0.49818915", "0.49817446", "0.4969272", "0.49689457", "0.49671495", "0.49636778", "0.4961516", "0.49606618" ]
0.0
-1
Dump .crt and .csr files.
def show_command(args):
    for fn in args.files:
        ext = os.path.splitext(fn)[1].lower()
        if ext == '.csr':
            cmd = ['openssl', 'req', '-in', fn, '-text']
        elif ext == '.crt':
            cmd = ['openssl', 'x509', '-in', fn, '-text']
        else:
            die("Unsupported file: %s", fn)
        subprocess.check_call(cmd)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dump_to_disk(self, prefix):\n\n f = open(prefix + rpki.sundial.now().isoformat() + \"Z.cms\", \"wb\")\n f.write(self.get_DER())\n f.close()", "def dump_vecs():\n v_file = os.path.join(TMP_DIR, 'vectorizer.pickle')\n d_file = os.path.join(TMP_DIR, 'dectorizer.pickle')\n f_file = os.path.join(TMP_DIR, 'freq.pickle')\n \n with open(v_file, 'wb') as f:\n pickle.dump(VECTORIZER, f)\n with open(d_file, 'wb') as f:\n pickle.dump(CECTORIZER, f)", "def save_sparse_csr(filename,array, labels, vocab):\n np.savez(filename,data = array.data ,indices=array.indices,\n indptr =array.indptr, shape=array.shape, labels=labels, vocab=vocab)", "def CreateCrtFile(keyfile, csrfile):\n crtfile = tempfile.mkstemp()[1]\n cmd = [\n 'openssl',\n 'x509',\n '-req',\n '-days', '1',\n '-in', csrfile,\n '-signkey', keyfile,\n '-out', crtfile\n ]\n _RunCommand(cmd)\n return crtfile", "def write_out(c2ptmk, ofn):\n print \"Writing out to [{}]\".format(ofn)\n with codecs.open(ofn, \"w\", \"utf8\") as ofd:\n for co, infos in sorted(c2ptmk.items()):\n ofd.write(u\"{}\\t{}\\t{}\\n\".format(\n co, infos[\"uri\"], \",\".join(\n [unicode(x) for x in infos[\"ptmks\"]])))", "def catalogDump(out):\n if out is not None: out.flush()\n libxml2mod.xmlCatalogDump(out)", "def save_chain(self):\n pprint('saving to file named bc_file.txt')\n with open('ddos_bc_file.txt', 'w') as output:\n output.write(serializer.serialize(self.chain))", "def pickle_dump_files():\n with open('data/' + dataset_name + '_' + model_name + '_' + 'predictions', 'wb') as f:\n pickle.dump(predictions, f)\n with open('data/' + dataset_name + '_' + model_name + '_' + 'state_sentences', 'wb') as f:\n pickle.dump(final_state_sentences, f)\n with open('data/' + dataset_name + '_' + model_name + '_' + 'decoded_sentences', 'wb') as f:\n pickle.dump(final_decoded_sentences, f)\n with open('data/' + dataset_name + '_' + model_name + '_' + 'ids', 'wb') as f:\n pickle.dump(idx, f)\n with open('data/' + dataset_name + '_' + model_name + '_' + 'exemplars', 'wb') as f:\n pickle.dump(exemplars, f)\n with open('data/' + dataset_name + '_' + model_name + '_' + 'counter_exemplars', 'wb') as f:\n pickle.dump(counter_exemplars, f)\n with open('data/' + dataset_name + '_' + model_name + '_' + 'top_exemplar_words', 'wb') as f:\n pickle.dump(top_exemplar_words, f)\n with open('data/' + dataset_name + '_' + model_name + '_' + 'top_counter_exemplar_words', 'wb') as f:\n pickle.dump(top_counter_exemplar_words, f)", "def dump(self):\n if self.jfile != None: \n with open(\"%s.json\" % self.jfile, 'w') as fd:\n json.dump([self.buf_i, self.buf], fd) \n if self.cas != None: \n self.cas.dump()", "def dump(self, filename, mode='w', rebox=False):\n from os import path\n filepath = path.abspath(path.expanduser(filename))\n if mode == 'w':\n open(filepath, 'w').close() \n for t, ts in self:\n ts.dump(filename, rebox=rebox)", "def dump(self, args):\n if self.stru:\n self.stru.dump(args)\n if self.index:\n self.index.dump(args)\n if self.bank:\n self.bank.dump(args)\n if self.sys:\n self.sys.dump(args)", "def dump():\n celex_root = sys.argv[1]\n lang = sys.argv[2]\n mode = sys.argv[3]\n\n creader = CelexDB(celex_root, lang)\n\n if mode == \"infl\":\n print 'Inflectional word sets:'\n for head, words in sorted(creader.lemma_map.items(), key=lambda x: x[0].lower()):\n print \"%s: %s\" % (head, ', '.join(words))\n elif mode == \"deriv\":\n print 'Inflectional/Derivational word sets:'\n for root, words in sorted(creader.root_map.items(), key=lambda x: x[0].lower()):\n print \"%s: %s\" % (root, ', 
'.join(words))\n elif mode == \"freq\":\n sorted_word_freqs = sorted(creader.word_freqs.items(), key=lambda x: x[1], reverse=True)\n maxlen = None\n for word, freq in sorted_word_freqs:\n if not maxlen:\n maxlen = len(str(freq))\n print (\"%\" + str(maxlen) + \"d\") % freq, word\n else:\n print >> sys.stderr, \"Unknown testing mode:\", mode\n sys.exit(64)", "def test_file_dump():\n \n subject = 'Test Message Subject'\n target_path = '/testdir/'\n label = 'MessageLabel'\n\n _file_dump()\n _file_dump(subject=subject)\n _file_dump(label=label)\n _file_dump(target_path=target_path)\n _file_dump(subject=subject, label=label)\n _file_dump(subject=subject, target_path=target_path)\n _file_dump(label=label, target_path=target_path)\n _file_dump(subject=subject, label=label, target_path=target_path)", "def writeCADFile(self, filename):\n valid_filetypes = [\"brep\", \"bstl\", \"egads\", \"egg\", \"iges\", \"igs\", \"sens\", \"step\", \"stl\", \"stp\", \"tess\", \"grid\"]\n file_extension = filename.split(\".\")[-1]\n if file_extension.lower() not in valid_filetypes:\n raise OSError(\n \"CAD filename \"\n + filename\n + \" must have a valid exension. \"\n + \"Consult the EngineeringSketchPad docs for the DUMP function\"\n )\n if self.comm.rank == 0:\n modelCopy = self.espModel.Copy()\n n_branches, _, _ = modelCopy.Info()\n modelCopy.NewBrch(\n n_branches, modelCopy.GetCode(\"dump\"), \"<none>\", 0, filename, \"0\", \"0\", \"0\", \"\", \"\", \"\", \"\", \"\"\n )\n modelCopy.Build(0, 0)", "def to_file(c, path, seq_types=None):\n with open(path, \"w\") as f:\n f.write(circuit_to_verilog(c, seq_types))", "def dump_stream(recs, path):\n with copen(path, 'w', 'utf-8') as out:\n for rec in recs:\n out.write(rec)\n out.write('\\n')", "def save_reconstructions(reconstructions, out_dir):\n out_dir.mkdir(exist_ok=True)\n for fname, recons in reconstructions.items():\n file_path = out_dir/fname\n np.save(file_path,recons)", "def save(save_path: str, features: List[List[int]]):\n features = np.stack(features)\n sparse_features = sparse.csr_matrix(features)\n\n with open(save_path, 'wb') as f:\n pickle.dump(sparse_features, f)", "def save_ca():\n cert_file = os.environ.get('HOME') + '/.cat_installer/ca.pem'\n debug(\"saving cert\")\n with open(cert_file, 'w') as cert:\n cert.write(Config.CA + \"\\n\")", "def dump(self):\n # dump self.data\n pickle.dump(self.data, open(self.data_dir + DATA_PATH, 'wb+'))\n # dump self.code2desc\n pickle.dump(self.code2desc, open(self.data_dir + CODE2DESC_PATH, 'wb+'))\n # dump self.family2tf\n pickle.dump(self.family2tf, open(self.data_dir + FAMILY2TF_PATH, 'wb+'))\n # dump self.word2tf\n pickle.dump(self.word2tf, open(self.data_dir + WORD2TF_PATH, 'wb+'))\n # dump self.word2df\n pickle.dump(self.word2df, open(self.data_dir + WORD2DF_PATH, 'wb+'))\n return None", "def collect_full_core_dump(core_dump_dir=\"\", filename=\"\"):\r\r\n\r\r\n loggerModem = logging.getLogger(__name__ + 'collect_full_core_dump')\r\r\n\r\r\n core_dump_path = (os.path.join(core_dump_dir, filename))\r\r\n\r\r\n icera_tools_bin_path = os.sep.join(os.environ['PL1_WCDMA_TEST_ROOT'].split(os.sep)[:]+['common', 'icera'])\r\r\n\r\r\n get_crash_dump_log(core_dump_dir=core_dump_dir, icera_utils_path=icera_tools_bin_path)", "def save_file(map_, args): \n if args.segments:\n p = os.path.join(args.res_dir, 'compression_'+args.db+\"_seg\")\n else:\n p = os.path.join(args.res_dir, 'compression_'+args.db)\n with open(p, 'w') as f:\n for file in map_:\n f.write(\"{} {}\\n\".format(file, map_[file]))", "def 
core_dump(self):\r\r\n loggerModem = logging.getLogger(__name__ + 'core_dump')\r\r\n cmd_l=[r'at%debug=0', r'at%debug=2']\r\r\n cmd_str='\\r\\n'.join(cmd_l)\r\r\n\r\r\n text_str = \"AT command\"\r\r\n if self.dumpfile:\r\r\n loggerModem.debug(\"Core file : %s\" % self.dumpfile)\r\r\n loggerModem.debug(\"%-15s:\\t%s\" %(text_str, cmd_str))\r\r\n with open(self.dumpfile, 'wb') as fd:\r\r\n cmd_str = cmd_str + '\\r\\n'\r\r\n self.serObj.write(cmd_str) # write a string\r\r\n len_rd=0\r\r\n response = self.serObj.read(2**16)\r\r\n while len(response)>0:\r\r\n len_rd += len(response)\r\r\n loggerModem.debug(\"read %s bytes, current_len=%s\" % (len(response), len_rd))\r\r\n fd.write(response)\r\r\n response = self.serObj.read(2**16)\r\r\n loggerModem.info(\"Created core dump: %s\" % self.dumpfile)\r\r\n else:\r\r\n loggerModem.info(\"No core dump as no dump file specified!\")", "def write_scram_toolfile(self, contents, filename):\n with open(self.spec.prefix.etc + '/scram.d/' + filename, 'w') as f:\n f.write(contents)\n f.close()", "def dump(self, model_path):\n pickle.dump(self.scaler, gzip.open(os.path.join(model_path, 'scaler.pkl.gz'), 'w'),\n protocol=pickle.HIGHEST_PROTOCOL)\n# pickle.dump(self.mapper, gzip.open(os.path.join(model_path, 'mapper.pkl.gz'),'w'),\n# protocol=pickle.HIGHEST_PROTOCOL)\n pickle.dump(self.batcher, gzip.open(os.path.join(model_path, 'batcher.pkl.gz'), 'w'),\n protocol=pickle.HIGHEST_PROTOCOL)", "def export_ctsdg(cfg):\n generator = Generator(\n image_in_channels=config.image_in_channels,\n edge_in_channels=config.edge_in_channels,\n out_channels=config.out_channels\n )\n generator.set_train(False)\n load_checkpoint(cfg.checkpoint_path, generator)\n\n ckpt_path = Path(cfg.checkpoint_path)\n output_file_name = (ckpt_path.parent / ckpt_path.stem).as_posix()\n file_format = config.file_format\n\n img_dummy = mnp.zeros([1, config.image_in_channels, *cfg.image_load_size],\n dtype=mstype.float32)\n edge_dummy = mnp.zeros([1, 2, *cfg.image_load_size], dtype=mstype.float32)\n mask_dummy = mnp.zeros([1, 1, *cfg.image_load_size], dtype=mstype.float32)\n\n export(generator, img_dummy, edge_dummy, mask_dummy,\n file_name=output_file_name, file_format=file_format)\n\n print(f'{output_file_name}.mindir exported successfully!', flush=True)", "def export(course, export_dir):\r\n fs = OSFS(export_dir, create=True)\r\n if not fs.isdirempty('.'):\r\n print ('WARNING: Directory {dir} not-empty.'\r\n ' May clobber/confuse things'.format(dir=export_dir))\r\n\r\n try:\r\n course.runtime.export_fs = fs\r\n root = lxml.etree.Element('root')\r\n course.add_xml_to_node(root)\r\n with fs.open('course.xml', mode='w') as f:\r\n root.write(f)\r\n\r\n return True\r\n except:\r\n print 'Export failed!'\r\n traceback.print_exc()\r\n\r\n return False", "def DumpAbi(output_dir, input_files, product_dir, archs, dumper_dir):\n # Get names of the libraries to dump\n lib_names = []\n for input_file in input_files:\n if input_file.endswith(\".so\"):\n lib_names.append(input_file)\n else:\n with open(input_file, \"r\") as lib_list:\n lib_names.extend(line.strip() for line in lib_list\n if line.strip())\n # Create the dumps\n for arch in archs:\n lib_dir = GetSystemLibDirByArch(product_dir, arch)\n dump_dir = os.path.join(output_dir, arch)\n if not os.path.exists(dump_dir):\n os.makedirs(dump_dir)\n for lib_name in lib_names:\n lib_path = os.path.join(lib_dir, lib_name)\n symbol_dump_path = os.path.join(dump_dir, lib_name + \"_symbol.dump\")\n vtable_dump_path = os.path.join(dump_dir, lib_name + 
\"_vtable.dump\")\n print(lib_path)\n print(DumpSymbols(lib_path, symbol_dump_path))\n print(DumpVtables(lib_path, vtable_dump_path, dumper_dir))\n print(\"\")", "def generate(self):\n self._open_file()\n # copied from GenerateCSPEC.py\n self._write_header_and_defaults()\n self._write_source()\n self._write_sample()\n\n self._write_all_components()\n self._write_mantle_module()\n self._write_segment()\n self._write_all_ids()\n self._write_footer()\n self._close_file()", "def write_scram_toolfiles(self):\n from string import Template\n\n mkdirp(join_path(self.spec.prefix.etc, 'scram.d'))\n\n values = {}\n values['VER'] = self.spec.version\n values['PFX'] = self.spec.prefix\n\n fname = 'uuid-cms.xml'\n template = Template(\"\"\"<tool name=\"uuid\" version=\"$VER\">\n <lib name=\"uuid\"/>\n <client>\n <environment name=\"LIBUUID_BASE\" default=\"$PFX\"/>\n <environment name=\"LIBDIR\" default=\"$$LIBUUID_BASE/lib\"/>\n <environment name=\"INCLUDE\" default=\"$$LIBUUID_BASE/include\"/>\n </client>\n <runtime name=\"ROOT_INCLUDE_PATH\" value=\"$$INCLUDE\" type=\"path\"/>\n <use name=\"root_cxxdefaults\"/>\n <use name=\"sockets\"/>\n</tool>\"\"\")\n\n contents = template.substitute(values)\n self.write_scram_toolfile(contents, fname)\n\n fname = 'libuuid.xml'\n template = Template(\"\"\"<tool name=\"libuuid\" version=\"$VER\">\n <lib name=\"uuid\"/>\n <client>\n <environment name=\"LIBUUID_BASE\" default=\"$PFX\"/>\n <environment name=\"LIBDIR\" default=\"$$LIBUUID_BASE/lib\"/>\n <environment name=\"INCLUDE\" default=\"$$LIBUUID_BASE/include\"/>\n </client>\n <runtime name=\"ROOT_INCLUDE_PATH\" value=\"$$INCLUDE\" type=\"path\"/>\n <use name=\"root_cxxdefaults\"/>\n <use name=\"sockets\"/>\n</tool>\"\"\")\n\n contents = template.substitute(values)\n self.write_scram_toolfile(contents, fname)", "def main():\n # Create / clean output dir\n if os.path.isdir(OUT_DIR):\n shutil.rmtree(OUT_DIR)\n os.mkdir(OUT_DIR)\n\n # Write all assets to the directory\n for fname, bb in create_assets().items():\n filename = os.path.join(OUT_DIR, fname)\n dirname = os.path.dirname(filename)\n if not os.path.isdir(dirname):\n os.makedirs(dirname)\n with open(filename, \"wb\") as f:\n f.write(bb)", "def write_dump_file(dir_name, file_name, ext, datas):\n f_name = build_file_path(dir_name, file_name, ext)\n with open(f_name, \"w\") as fd_out:\n fd_out.write(\"Dump file for %s\" % file_name)\n for name, data in datas.items():\n fd_out.write(\"\\n\\n%s\\n%s\\n\\n\" % (\"-\" *80, name))\n if isinstance(data, dict):\n for key, value in data.items():\n if isinstance(value, OrderedDict):\n value = dict(value)\n fd_out.write('\"%s\": %s\\n' % (key, pprint.pformat(value, indent=4)))\n else:\n fd_out.write(pprint.pformat(data, indent=4))", "def open_output_files(self):\n if not os.path.exists(self.outputDictionaryPath):\n os.makedirs(self.outputDictionaryPath)\n\n self.XMLfile = open(os.path.join(self.outputDictionaryPath, 'MyDictionary.xml'), 'w+', encoding='utf-8') # this is the output file\n self.Makefile = open(os.path.join(self.outputDictionaryPath, 'Makefile'), 'w+', encoding='utf-8')\n self.MyInfoFile = open(os.path.join(self.outputDictionaryPath, 'MyInfo.plist'), 'w+', encoding='utf-8')", "def dump_cworld_tar(\n cooler_paths,\n out_path,\n):\n\n dataset_name = os.path.splitext(os.path.split(out_path)[1])[0]\n\n with tempfile.TemporaryDirectory() as cworld_tmp_path:\n for cooler_path in cooler_paths:\n res = cooler.Cooler(cooler_path).info[\"bin-size\"]\n os.mkdir(os.path.join(cworld_tmp_path, \"C-\" + str(res)))\n for 
iced, iced_label in [(True, \"iced\"), (False, \"raw\")]:\n folder_path = os.path.join(cworld_tmp_path, \"C-\" + str(res), iced_label)\n os.mkdir(folder_path)\n\n mat_path = os.path.join(\n folder_path,\n \"{}__C-{}-{}.matrix.gz\".format(dataset_name, res, iced_label),\n )\n\n dump_cworld(\n in_cooler=cooler_path, out=mat_path, iced=iced, iced_unity=False\n )\n\n with tarfile.open(out_path, mode=\"w\") as archive:\n archive.add(cworld_tmp_path, arcname=dataset_name, recursive=True)", "def save (self, filename) :\n\t\tserialFile = open (filename, \"wb\")\n\t\tpickle.dump (self.production_rules, serialFile)\n\t\tpickle.dump (self.unitrelation, serialFile)\n\t\tpickle.dump (self.labels, serialFile)\n\t\tpickle.dump (self.keeper, serialFile)\n\t\tpickle.dump (self.strnodes, serialFile)\n\t\tpickle.dump (self.tokens, serialFile)\n\t\tserialFile.close()", "def dump_pkl(obj, path):\r\n f = open(path, 'wb')\r\n try:\r\n cPickle.dump(obj, f, protocol=cPickle.HIGHEST_PROTOCOL)\r\n finally:\r\n f.close()", "def write_all_patients():\n\n data_dir = sys.argv[1]\n output_dir = sys.argv[2]\n\n imgs, i_msks, o_msks = load_all_patients(data_dir=data_dir)\n\n for idx, array in enumerate(imgs):\n np.save(output_dir+'/img_'+str(idx), array)\n for idx, array in enumerate(i_msks):\n np.save(output_dir+'/i_msk_'+str(idx), array)\n for idx, array in enumerate(o_msks):\n np.save(output_dir + '/o_msk_' + str(idx), array)\n\n return None", "def write_maps(self):\n if np.allclose(self.xmap.origin, 0):\n ext = \"ccp4\"\n else:\n ext = \"mrc\"\n\n for q, coor, b in zip(self._occupancies, self._coor_set, self._bs):\n self.conformer.q = q\n self.conformer.coor = coor\n self.conformer.b = b\n self._transformer.density()\n fname = os.path.join(self.directory_name, f\"model.{ext}\")\n self._transformer.xmap.tofile(fname)\n self._transformer.xmap.array -= self.xmap.array\n fname = os.path.join(self.directory_name, f\"diff.{ext}\")\n self._transformer.xmap.tofile(fname)\n self._transformer.reset(full=True)", "def dump_dc():\n network = Network(pickle=True)\n skus = {}\n for i in range(0, len(network.dcs)):\n for j in network.dcs[i].inventory.keys():\n if j not in skus:\n skus[j] = [0, 0, 0, 0, 0]\n skus[j][i] += network.dcs[i].inventory[j]\n else:\n skus[j][i] += network.dcs[i].inventory[j]\n arr = []\n for i in skus.keys():\n arr.append([i,skus[i][0], skus[i][1], skus[i][2], skus[i][3], skus[i][4]])\n with open('dc_inv.csv', 'wb') as csv_file:\n writer = csv.writer(csv_file)\n writer.writerows(arr)", "def strudump(self, args):\n if not self.stru:\n print(\"missing CroStru file\")\n return\n self.dump_db_table_defs(args)", "def writeto(self, fileout):\n \n dump_pkl(self.data, fileout)", "def writeCC(self, fileName, allSCC):\n f = open(fileName,'w')\n\n for compNumber in range(0,len(allSCC)):\n f.write(\"Component number %s: \" % (compNumber))\n f.write(\"%s\\n\" % (str(allSCC[compNumber])))\n f.close()", "def output_cache(cc):\n\n out_file = os.path.join(cc.scene_dir, 'output', cc.scene_id+'_pickle')\n\n if cc.atmo_src == 'narr':\n out_file += '_narr'\n elif cc.atmo_src == 'merra':\n out_file += '_merra'\n\n with open(out_file, 'wb') as f:\n pickle.dump(cc, f)", "def save(self, cert_path: Union[Path, str], key_path: Union[Path, str]):\n cert_path, key_path = Path(cert_path), Path(key_path)\n\n cert_path.parent.mkdir(parents=True, exist_ok=True)\n with cert_path.open(\"wb\") as file:\n file.write(OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, self.cert))\n\n key_path.parent.mkdir(parents=True, exist_ok=True)\n 
with key_path.open(\"wb\") as file:\n file.write(OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, self.key))", "def dump_all_binaries_to_CSV():\n ## TODO\n timenow = datetime.now()", "def sign_certificate(csr):\n unique_filename = str(uuid.uuid4().hex)\n\n file = open(\"./csr_req/%s.csr\" % unique_filename, \"w\")\n file.write(csr.decode(\"utf-8\"))\n file.close()\n\n subprocess.run([\"../ca/scripts/sign.sh\", unique_filename], check=False)\n\n file = open(\"./csr_req/%s.p7b\" % unique_filename, \"r\")\n cert = file.read()\n\n os.remove(\"./csr_req/%s.csr\" % unique_filename)\n os.remove(\"./csr_req/%s.p7b\" % unique_filename)\n\n return cert", "def genRST(path):\n \n cplrRST = open(path, \"w\")\n for path in files:\n appendFile(path, cplrRST)\n cplrRST.close()", "def Dump_File(output):\n now = datetime.now()\n log_date = now.strftime(\"%Y-%m-%d\")\n log_file = str(log_date + \"-cisco_output.txt\")\n try:\n os.mknod(log_file)\n with open(log_file, 'wa') as f:\n f.write(output)\n f.write(\"\\n\")\n f.close()\n except OSError as err:\n with open(log_file, 'wa') as f:\n f.write(output)\n f.write(\"\\n\")\n f.close()", "def test_bcftools_cli_dump(self):\n runner = CliRunner()\n result = runner.invoke(cli.main, [\"dump\"])\n assert result.exit_code == 0\n assert os.path.isfile(os.path.join(BASE_DIR, \"hmtnote_dump.pkl\"))", "def dump_dict(dumpfilename, file_names, test_files, offsets_dict):\n if _dump_dict_on:\n file(_dumpify(umpfilename + '.txt'), 'wt').write(_od_offsets_string(file_names, offsets_dict))\n file(o_dumpify('.strings.txt'), 'wt').write(_od_substrings_string(offsets_dict))\n matrix = _od_offsets_matrix(file_names, offsets_dict, test_files)\n file(_dumpify(dumpfilename + '.csv'), 'wt').write(matrix)", "def save(self):\n \n with open(os.path.join(self.output_dir, 'terms.dict'), 'wb') as f:\n pkl.dump(self.term_id_map, f)\n with open(os.path.join(self.output_dir, 'docs.dict'), 'wb') as f:\n pkl.dump(self.doc_id_map, f)", "def output(cmds_list=\"\", filename=None):\n global _use\n calling_file = inspect.stack()[1].filename\n srcfile = calling_file.split(\"\\\\\")[-1]\n\n dstfile = (\n f'{filename}.scad' if filename else srcfile.replace(\".py\", \".scad\")\n )\n\n f = open(dstfile, \"w\")\n if _use:\n f.write(_use)\n _use = \"\"\n for cmd in cmds_list:\n f.write(cmd)\n f.close()", "def dump_xmlc_dataset(X: csr_matrix, y: csr_matrix, f: Union[str, TextIOWrapper]) -> None:\n\n # check type of argument\n if isinstance(f, str):\n file = open(f, 'w')\n elif isinstance(f, TextIOWrapper):\n file = f\n else:\n raise TypeError(f'f is type {type(f)} but should be either str or TextIOWrapper')\n\n # create and write header\n header = f'{X.shape[0]} {X.shape[1]} {y.shape[1]}\\n'\n file.write(header)\n\n # write data\n for index, (feature_vector, label_vector) in enumerate(zip(X, y)):\n labels = ','.join(map(str, label_vector.nonzero()[1]))\n\n features = [f'{str(ind)}:{str(feature_vector[0, ind])}' for ind in feature_vector.nonzero()[1]]\n features = \" \".join(features)\n\n file.write(f'{\" \".join((labels, features))}\\n')\n\n file.close()\n\n return", "def dump_objects():\n pass", "def save(self, path=\"\"):\n path = path + \"model_\" + str(self.name) + \".txt\"\n if os.path.isfile(path):\n os.remove(path)\n f = open(path, \"w+\")\n for ident in self.networks:\n f.write(ident + \"_\" + self.networks[ident].descriptor.codify_components() + \"_\" + str(self.networks[ident].taking.size) + \",\" + self.networks[ident].taking.type + \"_\" + str(self.networks[ident].producing.size) + \",\" 
+ self.networks[ident].producing.type + \"_\" +\n str(self.networks[ident].depth) + \"_\" + \",\".join(self.reachable[ident]) + \"_\" + \",\".join(self.comps_below[ident]) + \"\\n\")\n f.write(\"\\n\")\n\n for ident in self.inputs:\n f.write(ident + \"_\" + str(self.inputs[ident].producing.size) + \"_\" + self.inputs[ident].producing.type + \"_\" + str(self.inputs[ident].depth) + \"\\n\")\n f.write(\"\\n\")\n\n for ident in self.outputs:\n f.write(ident + \"_\" + str(self.outputs[ident].taking.size) + \"_\" + self.outputs[ident].taking.type + \"_\" + str(self.outputs[ident].depth) + \"_\" + \",\".join(self.comps_below[ident]) + \"\\n\")\n f.write(\"\\n\")\n\n for con in self.connections:\n f.write(self.connections[con].codify() + \"\\n\")\n #f.write(\"\\n\")\n\n f.close()\n\n return path", "def extract_diagnostics (self):\n\t\t# TODO: something like this could move into the base class\n\t\tdiag = {}\n\t\tfilenames = [\n\t\t\tINSEQ_FILENAME,\n\t\t\tOUTALIGN_FILENAME,\n\t\t]\n\t\tfor item in filenames:\n\t\t\tfpath = os.path.join (self._curr_workdir, item)\n\t\t\tdiag[item] = utils.file_to_string (fpath)\n\t\treturn diag", "def dump(fh, xs, model=None, properties=False, indent=True, **kwargs):\n text = dumps(\n xs, model=model, properties=properties, indent=indent, **kwargs\n )\n if hasattr(file, 'write'):\n print(text, file=file)\n else:\n with open(file, 'w') as fh:\n print(text, file=fh)", "def _write_test_dump(pcap_file, dump):\n\t\tfor packet in dump:\n\t\t\twrpcap(pcap_file, packet, append=True)", "def hexdump(args=None):\n args = parser.parse_args(args)\n with LogSetup(args):\n contents = args.file.read()\n args.file.close()\n dump(contents, width=args.width)", "def archive_backup(self):\n\n # Archiving the Training script\n shutil.copyfile(self.script_path, self.save_path + '/0-' + os.path.basename(self.script_path))\n os.chmod(self.save_path + '/0-' + os.path.basename(self.script_path), 0o755)\n # Archiving the src folder\n pkg_path = os.path.dirname(arch_src)\n backup_path = os.path.join(self.save_path, 'src_backup')\n shutil.make_archive(backup_path, 'gztar', pkg_path)\n\n # Archiving the Environment Info\n env_info = collect_env.get_pretty_env_info()\n with open(self.save_path + '/env_info.txt', 'w') as f:\n f.write(env_info)", "def write_out_v(vc2ptmk, ofn):\n print \"Writing out verbose to [{}]\".format(ofn)\n with codecs.open(ofn, \"w\", \"utf8\") as ofd:\n for (co, uri), infos in sorted(vc2ptmk.items()):\n for di in infos:\n ol = u\"{}\\t{}\\t{}\\t{}\\t{}\\n\".format(\n co, uri, di[\"ptmk\"], di[\"spk\"], di[\"sco\"])\n ofd.write(ol)", "def dump(self, filename):\n\n utils.save(filename, {'model': self}, zipped=True)", "def save_decompression():\n dna_seq, bin_seq, comp_seq, file_comp = binary_to_seq()\n \n #create a new file containing the original sequence\n file_path = os.path.splitext(file_comp)[0]\n file = open(file_path+ \"_decompressed.txt\", \"w\")\n file.write(dna_seq)\n file.close()\n \n #show a message for saving\n messagebox.showinfo(\"Information\", \"Your decompression has been saved in \"\n +file_path+\"_decompressed.txt.\")\n \n #print(comp_seq, dna_seq, bin_seq)\n return comp_seq, dna_seq, bin_seq", "def _print_breakdown(cls, savedir, fname, data):\n if not os.path.exists(savedir):\n os.makedirs(savedir)\n\n with open(os.path.join(savedir, fname), 'w') as fout:\n fout.write(data)", "def save_model_states(state_dict, sparsified_model_dump_path, save_file_name, sparse_block_shape, norm, zip=True):\n folder_name = os.path.join(sparsified_model_dump_path, 
str(norm))\n\n # save model only states\n folder_str = f\"config_{sparse_block_shape}\"\n model_state = state_dict['state_dict']\n model_state_path = os.path.join(folder_name, folder_str, save_file_name)\n\n if not os.path.exists(os.path.dirname(model_state_path)):\n os.makedirs(os.path.dirname(model_state_path))\n torch.save(model_state, model_state_path)\n\n if zip:\n zip_path = model_state_path.replace('.ckpt', '.zip')\n with ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zip:\n zip.write(model_state_path, save_file_name)\n os.remove(model_state_path) # store it as zip, remove uncompressed\n model_state_path = zip_path\n\n model_state_path = os.path.abspath(model_state_path)\n file_size = os.path.getsize(model_state_path)\n file_size = file_size >> 20 # size in mb\n return model_state_path, file_size", "def write_features_to_file(filename,locs,desc):\n savetxt(filename, hstack((locs, desc)))", "def task_output_census_tracts():\n for dept in Department.list():\n yield {\n 'name': dept.name,\n 'file_dep': [dept.census_tracts_path],\n 'targets': [dept.census_tracts_output],\n 'actions': ['cp %(dependencies)s %(targets)s'],\n 'clean': True,\n }", "def dump(self, filename):\n suffix = filename.split(\".\")[-1]\n if not suffix == \"dflx\":\n filename = filename + \".dflx\"\n os.makedirs(os.path.dirname(filename), exist_ok=True)\n f = open(filename, \"wb\")\n pickle.dump(self.meta, f)\n pickle.dump(self.__dict__, f)\n f.close()\n logging.info(\"Results dumped to %s.\", filename)", "def create_file_structure_from_tocs(work_path, out_path):\n print('Extracts files from the pyinstaller workpath')\n file_path_set = set()\n for path1 in os.listdir(work_path):\n full_path = os.path.join(work_path, path1)\n if not os.path.isdir(full_path):\n continue\n\n # We have a directory\n for path2 in os.listdir(full_path):\n if not '.toc' in path2:\n continue\n full_toc_file = os.path.join(full_path, path2)\n get_all_files_from_toc(full_toc_file, file_path_set)\n\n for file_path in file_path_set:\n relative_src = file_path[1:] if file_path[0] == '/' else file_path\n dst_path = os.path.join(out_path, relative_src)\n os.makedirs(os.path.dirname(dst_path), exist_ok=True)\n shutil.copy(file_path, dst_path)", "def to_crystfel_file(self, filename, coffset=None):\n translate.write_generic_crystfel(self, filename, coffset=coffset)\n return", "def write(self, cull=False):\n if cull:\n cull_prefixes(self).write()\n else:\n ser = self.g.serialize(format='nifttl', encoding='utf-8')\n with open(self.filename, 'wb') as f:\n f.write(ser)\n #print('yes we wrote the first version...', self.name)", "def dumpMemory():\n libxml2mod.xmlDumpMemory()", "def Save_Fastas3(Neg_cases,NumDataset):\r\n handle = open(\"../Data/Negative_cases/Uniprot_total_neg_cases_pfam_dom.fasta\", \"rU\")\r\n records = list(SeqIO.parse(handle, \"fasta\"))\r\n handle.close()\r\n \r\n directory=\"../Data/Datasets/Dataset\"+str(NumDataset)\r\n if not os.path.exists(directory):\r\n os.makedirs(directory)\r\n file=open(\"../Data/Datasets/Dataset\"+str(NumDataset)+\"/negative_cases.fasta\",\"w\")\r\n for i in Neg_cases:\r\n file.write(str(records[i].format(\"fasta\")))\r\n file.close", "def build_catalog(filename):\n\n write_to_file(filename)", "def Write(self):\n template_mappings = {}\n\n template_file = os.path.join(self._l2tdevtools_path, self._TEMPLATE_FILE)\n file_content = self._GenerateFromTemplate(template_file, template_mappings)\n\n file_content = file_content.encode('utf-8')\n\n with open(self.PATH, 'wb') as file_object:\n 
file_object.write(file_content)", "def cmd_objdump_io(cmd):\n cmd = cmd + \" > tmp.out\"\n os.system(cmd)\n with open(\"tmp.out\") as f:\n print(\"\".join(\"# \" + x for x in f.readlines()[7:]))\n os.system(\"rm -f tmp.out\")", "def dumps(records, xslt_filename=None, **kwargs):\n root = dumps_etree(records=records, xslt_filename=xslt_filename)\n return etree.tostring(\n root,\n pretty_print=True,\n xml_declaration=True,\n encoding='UTF-8',\n **kwargs\n )", "def makeSpkSetupFile(leapSecondFilePath, outputPath):\n\n # If the file already exists, delete it and rewrite it.\n if os.path.exists(outputPath):\n os.remove(outputPath)\n\n# print 'Generating LRONAC compatible .pvl file ' + halfResFilePath\n f = open(outputPath, 'w')\n f.write(\"\\\\begindata\\n\")\n f.write(\"INPUT_DATA_TYPE = 'STATES'\\n\")\n f.write(\"OUTPUT_SPK_TYPE = 13\\n\")\n f.write(\"OBJECT_ID = -85\\n\") # LRO\n f.write(\"CENTER_ID = 301\\n\") # Moon\n f.write(\"REF_FRAME_NAME = 'J2000'\\n\")\n f.write(\"PRODUCER_ID = 'Lronac Pipeline'\\n\")\n f.write(\"DATA_ORDER = 'epoch x y z vx vy vz'\\n\")\n f.write(\"DATA_DELIMITER = ','\\n\")\n f.write(\"LEAPSECONDS_FILE = '\" + leapSecondFilePath + \"'\\n\")\n f.write(\"LINES_PER_RECORD = 1\\n\")\n f.write(\"TIME_WRAPPER = '# ETSECONDS'\\n\")\n #f.write(\"EPOCH_STR_LENGTH = 16\\n\")\n f.write(\"INPUT_DATA_UNITS = ('ANGLES=DEGREES' 'DISTANCES=km')\\n\")\n f.write(\"POLYNOM_DEGREE = 11\\n\")\n f.write(\"SEGMENT_ID = 'SPK_STATES_13'\\n\")\n# f.write(\"INPUT_DATA_FILE = 'spkDataFile.txt'\")\n# f.write(\"OUTPUT_SPK_FILE = '/home/smcmich1/testSpkFile.bsp'\")\n f.write(\"\\\\begintext\\n\")\n f.close()", "def _write_docx(self):\n with ZipFile(self.out_file, 'w') as f:\n self._write_content_types(f)\n self._write_app(f)\n self._write_core(f)\n self._write_rels(f)\n self._write_document(f)\n self._write_fonts(f)\n self._write_document_rels(f)\n self._write_settings(f)\n self._write_styles(f)", "def write_conformers(self, filename): # ccids):\n cnt = 0\n for confId in range(self.nconf): #ccids:\n w = Chem.SDWriter('%s_c%03d.sdf'%(filename,cnt+1))\n w.write(self.mol, confId=confId)\n w.flush()\n w.close()\n cnt += 1", "def save_file(self):\n # paginate over deputies and senators getting their fields\n fieldnames = set([])\n congressmen = self.deputies + self.senators\n for data in congressmen:\n fieldnames = fieldnames.union(data.dump().keys())\n\n\n with open(IDENTITY_FILE_UPDATED, 'a') as csvfile:\n writer = csv.DictWriter(csvfile, fieldnames=list(fieldnames), delimiter=';')\n writer.writeheader()\n\n for data in congressmen:\n writer.writerow(data.dump())", "def save_data(data_dir):\r\n for k in range(1,11):\r\n fold_name = 'fold' + str(k)\r\n print \"Saving\" + fold_name\r\n features, labels = process_audio(parent_path, [fold_name])\r\n labels = encode(labels)\r\n print \"Features of\", fold_name , \" = \", features.shape\r\n print \"Labels of\", fold_name , \" = \", labels.shape\r\n feature_file = os.path.join(data_dir, fold_name + '_x.npy')\r\n labels_file = os.path.join(data_dir, fold_name + '_y.npy')\r\n np.save(feature_file, features)\r\n print \"Saved \" + feature_file\r\n np.save(labels_file, labels)\r\n print \"Saved \" + labels_file", "def create_pki():\n os.mkdir(pki_dir)\n os.mkdir(f'{pki_dir}/newcerts')\n Path(f'{pki_dir}/index.txt').touch()\n with open(f'{pki_dir}/serial', 'w') as serial_file:\n serial_file.write('00000000')\n serial_file.close()\n create_CA('/CN=My cool CA/O=Honest Achmed/OU=Used Cars/C=EU')", "def dump_processed_data_to_file(self, facts, accu_label, 
article_label, imprison_label):\r\n data = [facts, accu_label, article_label, imprison_label]\r\n with open(util.MID_DATA_PKL_FILE_LOC, \"wb\") as f:\r\n pickle.dump(data, f)\r\n if util.DEBUG:\r\n print(\"DEBUG: data dumped to `.pkl` file\")", "def save_dumps(module_name: str, dumps: Dict[str, str], dump_root: str = \".\"):\n\n for dump_format in dumps:\n dump_name = module_name + \".\" + dump_format\n with open(Path(dump_root, dump_name), \"w\") as f:\n f.write(dumps[dump_format])", "def save(self,fout):\n\n # only process 0 should save\n if COMM_WORLD.rank == 0:\n\n # The file format is:\n # L,nterms,masks,signs,coefficients\n # where each is just a binary blob, one after the other.\n\n # do this first so that we haven't already created the file if\n # it fails for some reason\n msc = self.get_MSC()\n\n with open(fout,mode='wb') as f:\n\n # write the chain length to the file. This is the only parameter\n # that we save other than the MSC representation.\n L = self.L\n if L is None:\n raise ValueError('L must be set before saving to disk.')\n\n # cast it to the type that C will be looking for\n int_t = msc.dtype[0].type\n L = int_t(L)\n\n f.write(L.tobytes())\n\n # write out the length of the MSC representation\n size = int_t(msc.size)\n f.write(size.tobytes())\n\n f.write(msc['masks'].tobytes())\n f.write(msc['signs'].tobytes())\n f.write(msc['coeffs'].tobytes())\n\n COMM_WORLD.barrier()", "def write(filename):\n print(uc.write(filename))", "def saveacc2bst(bst_pols, filestarttimes, calrunstarttime,\n calrunduration, rcumode, calsrc, caltab_id, stnid,\n used_autocorr, saveformat=\"hdf5\"):\n (bstXX, bstXY, bstYY) = bst_pols\n version = '5' # Version of this dataformat\n calrundurationstr = str(int(calrunduration.total_seconds()))\n # Calculate start of ACC run.\n # Form self describing filename.\n dtlabel = 'acc2bst'\n acc2bstbase = \"{}_{}_rcu{}_{}_dur{}_ct{}_v{}_{}\".format(\n stnid, calrunstarttime.strftime(\"%Y%m%dT%H%M%S\"), rcumode, calsrc,\n calrundurationstr, caltab_id, version, dtlabel)\n pntstr = ilisa.monitorcontrol.directions.normalizebeamctldir(calsrc)\n # Write out the data.\n if saveformat == 'hdf5':\n hf = h5py.File(acc2bstbase + \".hdf5\", \"w\")\n freqs = modeparms.rcumode2sbfreqs(rcumode)\n hf.attrs['DataDescription'] = 'LOFAR acc2bst data'\n hf.attrs['StationID'] = stnid\n hf.attrs['calibrationSource'] = calsrc\n hf.attrs['pointing'] = pntstr\n hf.attrs['ObservationStart'] = calrunstarttime.isoformat()\n hf.attrs['ObservationDuration'] = calrundurationstr\n hf.attrs['calibrationTableDate'] = caltab_id\n hf.attrs['version'] = version\n hf.attrs['use_ac'] = used_autocorr\n hf['frequency'] = freqs\n hf['frequency'].attrs['unit'] = \"Hz\"\n hf['timeaccstart'] = filestarttimes.view('<i8')\n hf['timeaccstart'].attrs['unit'] = \"s\"\n\n hf['XX'] = bstXX\n hf['XX'].attrs['unit'] = \"arb. power\"\n hf['XY'] = bstXY\n hf['XY'].attrs['unit'] = \"arb. complex power\"\n hf['YY'] = bstYY\n hf['YY'].attrs['unit'] = \"arb. 
power\"\n\n hf['XX'].dims.create_scale(hf['timeaccstart'])\n hf['XX'].dims.create_scale(hf['frequency'])\n hf['XY'].dims.create_scale(hf['timeaccstart'])\n hf['XY'].dims.create_scale(hf['frequency'])\n hf['YY'].dims.create_scale(hf['timeaccstart'])\n hf['YY'].dims.create_scale(hf['frequency'])\n hf['XX'].dims[0].attach_scale(hf['timeaccstart'])\n hf['XX'].dims[1].attach_scale(hf['frequency'])\n hf['XY'].dims[0].attach_scale(hf['timeaccstart'])\n hf['XY'].dims[1].attach_scale(hf['frequency'])\n hf['YY'].dims[0].attach_scale(hf['timeaccstart'])\n hf['YY'].dims[1].attach_scale(hf['frequency'])\n hf.close()\n else:\n numpy.save(acc2bstbase + '_times', filestarttimes)\n numpy.save(acc2bstbase + '_XX', bstXX)\n numpy.save(acc2bstbase + '_XY', bstXY)\n numpy.save(acc2bstbase + '_YY', bstYY)\n return acc2bstbase + \".\" + saveformat", "def exportFoldFile(vectors, authors, fileName):\n with open(fileName, \"w\") as fFile:\n for idv, vec in enumerate(vectors):\n [fFile.write(str(val)+',') for val in vec]\n fFile.write(authors[idv] + '\\n')", "def dump(self):\n for cache_set in self.cache_sets:\n cache_set.dump()", "def save_mfccs(fname, mfccs, categories, distances=None):\n # data prep\n n = len(mfccs)\n categories = np.asarray(categories)\n mfcc_lens = np.empty(n, int)\n for i, mfcc in enumerate(mfccs):\n mfcc_lens[i] = mfcc.shape[0]\n flat_mfccs = np.concatenate(mfccs, axis=0)\n # save data\n if os.path.isfile(fname):\n _save_mfccs_append(fname, mfccs, flat_mfccs, categories, distances, mfcc_lens)\n else:\n _save_mfccs_new(fname, mfccs, flat_mfccs, categories, distances, mfcc_lens)", "def dumpf(self, gzip=False):\n if 0 != len(self.sources):\n os.mkdir(self.name)\n filename = os.path.join(self.name, 'bootstrap.sh')\n f = codecs.open(filename, 'w', encoding='utf-8')\n elif gzip:\n filename = '{0}.sh.gz'.format(self.name)\n f = gziplib.open(filename, 'w')\n else:\n filename = '{0}.sh'.format(self.name)\n f = codecs.open(filename, 'w', encoding='utf-8')\n f.write(self.comment)\n f.write('cd \"$(dirname \"$0\")\"\\n')\n for filename2, content in sorted(self.sources.iteritems()):\n f2 = open(os.path.join(self.name, filename2), 'w')\n f2.write(content)\n f2.close()\n for out in self.out:\n f.write(out)\n f.close()\n if gzip and 0 != len(self.sources):\n filename = 'sh-{0}.tar.gz'.format(self.name)\n tarball = tarfile.open(filename, 'w:gz')\n tarball.add(self.name)\n tarball.close()\n return filename\n return filename", "def dump(obj, dest_file):\n dest_file.write(dumps(obj))", "def main():\n train_src = read_file(SRC_TRAIN)\n train_tgt = read_file(TRGT_TRAIN)\n val_src = read_file(SRC_VAL)\n val_tgt = read_file(TRGT_VAL)\n # val = read_files(VAL_FILES)\n np.savez(\n DATA_NPZ_NAME, train_src=train_src, train_tgt=train_tgt, val_src=val_src, val_tgt=val_tgt)", "def export_proof(proof, filename):\n pickle_out = open(filename, \"wb\")\n pickle.dump(proof, pickle_out)\n pickle_out.close()\n print(\"Proof was written in \" + filename)", "def export_project_dump(self, key):", "def write_vecs(self, vecs_fname):\r\n header = f'{self.vectors.shape[0]} {self.vectors.shape[1]}'\r\n np.savetxt(vecs_fname, np.hstack([self.words.reshape(-1, 1), self.vectors]), fmt='%s', header=header)", "def write_telluric_transmission_to_file(wls,T,outpath):\n import pickle\n print('------Saving teluric transmission to '+outpath)\n with open(outpath, 'wb') as f: pickle.dump((wls,T),f)", "def export(ctx):\n LOG.info(\"Running scout export\")", "def write_uncompressed_skims(skims, directory, overwrite=False):\n os.makedirs(directory, 
exist_ok=True)\n for k in skims:\n filename = os.path.join(directory, f\"{k}.emx\")\n if not os.path.exists(filename) or overwrite:\n skims[k].values.tofile(filename)", "def flush(self, filename=None):\n if filename is None:\n filename = BlockChainConf.DEFAULT_CHAIN_DUMP_FILENAME\n\n with open(filename, \"w\") as dump:\n dump.write(dumps(self.chain))" ]
[ "0.5700215", "0.5441109", "0.53787977", "0.5377256", "0.53298485", "0.53045154", "0.5229976", "0.522061", "0.5135358", "0.5086592", "0.5069545", "0.50393033", "0.5038316", "0.5030887", "0.50227106", "0.50061053", "0.49667236", "0.49623433", "0.4961675", "0.49565977", "0.49195933", "0.49145895", "0.4910373", "0.49041113", "0.48776022", "0.4874583", "0.4870693", "0.4865715", "0.48553643", "0.48505354", "0.48473114", "0.4847026", "0.4836419", "0.4834511", "0.4829839", "0.48182493", "0.48121268", "0.48096064", "0.48045757", "0.47945866", "0.47829348", "0.47819477", "0.47790623", "0.47549102", "0.47530562", "0.47365022", "0.47333264", "0.4727789", "0.47252354", "0.47216886", "0.472012", "0.4718274", "0.4705671", "0.47007778", "0.4687355", "0.4686533", "0.46813834", "0.46804538", "0.46669322", "0.4666194", "0.4656732", "0.46548438", "0.46528247", "0.46511075", "0.4651067", "0.46449724", "0.46364507", "0.46293214", "0.46292466", "0.4617359", "0.46114966", "0.46003106", "0.45986035", "0.4592399", "0.4585795", "0.45849615", "0.45691928", "0.4568587", "0.45674855", "0.4563884", "0.4561236", "0.4560476", "0.4554256", "0.454792", "0.45479104", "0.45464665", "0.45430914", "0.45389533", "0.45373732", "0.45334664", "0.45322016", "0.45298344", "0.45293644", "0.45263004", "0.45241925", "0.4523917", "0.45233148", "0.45229104", "0.4517388", "0.45106077", "0.45103768" ]
0.0
-1
Load arguments, select and run command.
def run_sysca(argv):
    global QUIET
    ap = setup_args()
    args = ap.parse_args(argv)
    if args.quiet:
        QUIET = True
    if args.command == 'new-key':
        newkey_command(args)
    elif args.command == 'request':
        req_command(args)
    elif args.command == 'sign':
        sign_command(args)
    elif args.command == 'selfsign':
        selfsign_command(args)
    elif args.command == 'show':
        show_command(args)
    else:
        die("Unknown command: %s", args.command)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def main():\n sys.argv.pop(0)\n (cmd, var, args) = process_options(sys.argv[:])\n execute(cmd, var, args)", "def main(args):\n options = parse_cmd_parameters_(args)\n execute_(options)", "def do_command(self, args):\n pass", "def execute(self):\n \n current_dir = os.getcwd()\n sys.path.append(current_dir)\n \n parser = self.get_parser()\n \n self.autocomplete()\n \n result = parser.parse_args(self.argv[1:])\n \n kwargs = vars(result)\n \n env = kwargs.pop('env', None)\n if env is not None:\n options['env'].set(unicode(env))\n \n tornado.options.parse_command_line()\n \n if env is not None:\n tornado.options.parse_config_file(os.path.join(current_dir, 'envs/base.py'))\n if '.' in options.env:\n tornado.options.parse_config_file(os.path.join(current_dir, 'envs/%s.py' % (options.env.split('.')[0], )))\n tornado.options.parse_config_file(os.path.join(current_dir, 'envs/%s.py' % (options.env, )))\n \n command = get_command(kwargs.pop('subparser'))()\n \n if command.require_env and env is None:\n raise Exception('This command require env')\n \n command.handle(**kwargs)", "def mainCommand(self, args):\r\n command = args.pop(0).lower() # calls exception if no arguments present\r\n if command in vars(CommandManager):\r\n vars(CommandManager)[command](self, *args) # calls exception if wrong amount of arguments\r", "def load(args):\n subprocess.check_call([\"/bin/launchctl\", \"load\"] + values.get(args))", "def run(self):\n self._params = self.parsingcommands()\n self.start()", "def load_and_run(runner):\r\n\r\n #**************************************************************************************************************************\r\n #Cache commands to be executed.\r\n #If using -object spoofs, -enable option needs to be added as either a standalone command or an option in another command.\r\n #Note: -enable is not required for named spoofs (-id).\r\n #Example command: runner.AddCommand('spoof_main \"-enable -object CHwOrders -data acRPM -value 0.33 -enable\"\\n','->',False)\r\n #Templates:\r\n #runner.AddCommand('spoof_main \"-enable\"\\n','->',False)\r\n #runner.AddCommand('spoof_main \"-id -var1 -var2 \"\\n','->',False)\r\n #runner.AddCommand('spoof_main \"-enable -object -data -value \"\\n','->',False)\r\n #runner.AddCommand('<enter any telnet command here>\\n','->',False)\r\n #**************************************************************************************************************************\r\n\r\n\r\n\r\n #**************************************************************************************************************************\r\n #Execute cached commands, then clear cached commands to get runner ready for next set of commands.\r\n #Optional pause here with additional details if necessary (i.e. 
instructions, timing, etc.).\r\n #The raw_input method will display the string then the operator will have to press ENTER to proceed.\r\n #raw_input(\"<Prompt to present to operator>\");\r\n #**************************************************************************************************************************\r\n runner.AddCommand(\"Log_Test_Info(\\\"Running spoof script \" + scriptName + \"\\\")\\n\", \"->\", False)\r\n runner.Run()\r\n runner.ResetCommands()\r\n \r\n \r\n runner.AddCommand('spoof_main\"-object CHwStates -data red -value 995\"\\n','->',False)\r\n runner.AddCommand('spoof_main\"-object CHwStates -data green -value 950\"\\n','->',False)\r\n runner.AddCommand('spoof_main\"-object ProcRun_IntegratedPlateletYield -data data -value 400000000000\"\\n','->',False)\r\n runner.AddCommand('spoof_main\"-enable\"\\n','->',False)\r\n runner.Run()\r\n runner.ResetCommands()\r\n\r\n\r\n #**************************************************************************************************************************\r\n #(Optional) Pause here with additional details if necessary (i.e. instructions, timing, etc.).\r\n #time.sleep for no operator prompt, raw_input for prompt.\r\n #The raw_input method will display the string then the operator will have to press ENTER to proceed.\r\n #time.sleep(30)\r\n #raw_input(\"<Prompt to present to operator>, press ENTER to continue.\");\r\n #**************************************************************************************************************************\r\n\r\n \r\n\r\n #**************************************************************************************************************************\r\n #(Optional) Next set of commands to be executed.\r\n #If more sets of commands are needed copies of this section and the \"Execute\" section below may be inserted after the \"Execute\" section below.\r\n #If data needs to be unspoofed prior to next spoof use command below.\r\n #runner.AddCommand('spoof_main \"-unspoof -object <Spoofed Object Here> -data <Spoofed Data Here>\"\\n','->',False)\r\n #Example command: runner.AddCommand('spoof_main \"-enable -object CHwOrders -data acRPM -value 0.33 -enable\"\\n','->',False)\r\n #**************************************************************************************************************************\r\n\r\n\r\n\r\n #**************************************************************************************************************************\r\n #(Optional) Execute next set of cached commands.\r\n #Optional pause here with additional details if necessary (i.e. instructions, timing, etc.).\r\n #The raw_input method will display the string then the operator will have to press ENTER to proceed.\r\n #raw_input(\"<Prompt to operator>\");\r\n #runner.Run()\r\n #runner.ResetCommands()\r\n #**************************************************************************************************************************\r\n\r\n\r\n\r\n #**************************************************************************************************************************\r\n #If desired, add a -disable -end or -unspoof command to disable the spoofer or simply unspoof spoofed data. 
\r\n #runner.AddCommand('spoof_main \"-unspoof -object <Spoofed Object Here> -data <Spoofed Data Here>\"\\n','->',False)\r\n #runner.AddCommand('spoof_main \"-disable\"\\n','->',False)\r\n #runner.AddCommand('spoof_main \"-end\"\\n','->',False)\r\n #**************************************************************************************************************************\r\n runner.AddCommand(\"Log_Test_Info(\\\"Exiting spoof script \" + scriptName + \"\\\")\\n\", \"->\", False)\r\n runner.Run()\r\n runner.ResetCommands()\r\n\r\n #**************************************************************************************************************************\r\n #Optional prompt to notify operator that script and all spoofs have been execute successfully.\r\n #raw_input(\"Script execution complete, press ENTER to close this window.\");\r\n #**************************************************************************************************************************\r", "def execute(self, args):", "def readCommand( argv ): ## argv belongs to the 'sys'-library and can be called through sys.argv. The function reads the console's comand line argument and passes it to a variable like so: args = sys.argv[1:]\n from optparse import OptionParser ## Option Parser is a powerful library for passing command line options (an advanced args) if you like. It allows you to add options by defining attributes. \n usageStr = \"\"\" \n USAGE: python pacman.py <options> \n EXAMPLES: (1) python pacman.py\n - starts an interactive game\n (2) python pacman.py --layout smallClassic --zoom 2\n OR python pacman.py -l smallClassic -z 2\n - starts an interactive game on a smaller board, zoomed in\n \"\"\" \n parser = OptionParser(usageStr) ## This creates the Option Parser instance. It also passes the usageStr which functions as a little help-text for the user.\n\n ### In this section all the option strings are defined. Typically each option has one short option string and one long option string. For example the parser.add_option('-n'... has '-n' as short and '--numGames' as the long option string. Both have the same effect. The option argument will be the same and be saved as the variabme 'numGames'. \n parser.add_option('-n', '--numGames', dest='numGames', type='int', \n help=default('the number of GAMES to play'), metavar='GAMES', default=1) ## the syntax for the options is (based on the example in this line) --n 3. 
This means that the value '3' would be assigned to the variable numGames.\n parser.add_option('-l', '--layout', dest='layout',\n help=default('the LAYOUT_FILE from which to load the map layout'), #The instance -> 'options.layout' defines the layout_file from which to load the map layout; DEFAULT = medium_classic\n metavar='LAYOUT_FILE', default='mediumClassic')\n parser.add_option('-p', '--pacman', dest='pacman',\n help=default('the agent TYPE in the pacmanAgents module to use'), #The instance -> 'options.pacman' defines which of the agent TYPE in the pacmanAgents moduleto use.\n metavar='TYPE', default='KeyboardAgent')\n parser.add_option('-t', '--textGraphics', action='store_true', dest='textGraphics',\n help='Display output as text only', default=False)\n parser.add_option('-q', '--quietTextGraphics', action='store_true', dest='quietGraphics',\n help='Generate minimal output and no graphics', default=False)\n parser.add_option('-g', '--ghosts', dest='ghost',\n help=default('the ghost agent TYPE in the ghostAgents module to use'),\n metavar = 'TYPE', default='RandomGhost')\n parser.add_option('-k', '--numghosts', type='int', dest='numGhosts',\n help=default('The maximum number of ghosts to use'), default=4)\n parser.add_option('-z', '--zoom', type='float', dest='zoom',\n help=default('Zoom the size of the graphics window'), default=1.0)\n parser.add_option('-f', '--fixRandomSeed', action='store_true', dest='fixRandomSeed',\n help='Fixes the random seed to always play the same game', default=False)\n parser.add_option('-r', '--recordActions', action='store_true', dest='record',\n help='Writes game histories to a file (named by the time they were played)', default=False)\n parser.add_option('--replay', dest='gameToReplay',\n help='A recorded game file (pickle) to replay', default=None)\n parser.add_option('-a','--agentArgs',dest='agentArgs',\n help='Comma separated values sent to agent. e.g. \"opt1=val1,opt2,opt3=val3\"')\n parser.add_option('-x', '--numTraining', dest='numTraining', type='int',\n help=default('How many episodes are training (suppresses output)'), default=0)\n parser.add_option('--frameTime', dest='frameTime', type='float',\n help=default('Time to delay between frames; <0 means keyboard'), default=0.1)\n parser.add_option('-c', '--catchExceptions', action='store_true', dest='catchExceptions',\n help='Turns on exception handling and timeouts during games', default=False)\n parser.add_option('--timeout', dest='timeout', type='int',\n help=default('Maximum length of time an agent can spend computing in a single game'), default=30)\n\n #ONCE ALL THE OPTIONS HAVE BEEN DEFINED, optparse is instructed to parse the programm's command line.\n ##> The parser.parse_args() returns two values:\n ### (A) OPTIONS: An object containing values for all of your options e.g.:e.g. 
if --file takes a single string argument, then options.file will be the filename supplied by the user, or None if the user did not supply that option\n ### (B) ARGS: The list of positional arguments leftover after parsing options (we call this here otherjunk)\n options, otherjunk = parser.parse_args(argv) ## if the user happens to accidentally enter a command other than the specified arguments specified by parser.add_option it is passed to otherjunk\n if len(otherjunk) != 0: ## if there actually ends up to be a value in the otherjunk the program raises an Exception.\n raise Exception('Command line input not understood: ' + str(otherjunk))\n args = dict() # ARGS IS THE VARIABLE THAT IS BEING RETURNED BY THE readCommand function.\n\n # Fix the random seed\n if options.fixRandomSeed: random.seed('cs188') # 'random.seed' is part of the random class. The random.seed([x]) command initialises a standard random number. Optional argument x can be any hashable object. \n\n # Choose a layout\n args['layout'] = layout.getLayout( options.layout ) # REF_LAYOUT111: layout.py --> This function returns the layout object that was created by the layout class via the getlayout function. This contains the height, width, walls, food, captules and agent positions etc.\n if args['layout'] == None: raise Exception(\"The layout \" + options.layout + \" cannot be found\")\n\n # Choose a Pacman agent\n noKeyboard = options.gameToReplay == None and (options.textGraphics or options.quietGraphics) ## noKeyboard is set to TRUE if the user chooses the --replay and text- or silent graphics option.\n ##print noKeyboard\n pacmanType = loadAgent(options.pacman, noKeyboard) ## [see REFERENCE_001]: the loadAgent function takes the pacman argument the user passed into the command line as the option--pacman option identifies the appropriate agent (which may be the programmed agent or whost agent). \n agentOpts = parseAgentArgs(options.agentArgs) ##Passes the option.agentArgs which was captured by the user's console input into the agentOps variable. agentArgs is: \"Comma separated values sent to agent. e.g. \"opt1=val1,opt2,opt3=val3. The ParseAgentArgs function converts the option - value pairings into a dictionary formatted opts[opt1] = val1. \n if options.numTraining > 0: ##numTraining was captured by the user's console input and designates how many games are training games which means that the output remains surpressed.\n args['numTraining'] = options.numTraining ## This takes the user's input as the -x or --numTraining and passes it to the args dictionary with the numTraining key as the args['numTraining'] variable.\n if 'numTraining' not in agentOpts: agentOpts['numTraining'] = options.numTraining ## This integrates the variable entered into as training rounds in the agentOpts variable.\n pacman = pacmanType(**agentOpts) ## REFERENCE002 ##Instantiate Pacman with agentOpts. ## The variable pacmanType contains a reference to agent module loaded by the load Agent function. This function does not cause the module to be instanciated. This happens when here ## See[REFERENCE_001]: ## The * and ** will 'soak up' any remaining values not otherwise accounted for. In this case these options are basically the agent options the user can input.\n ## agentOpts contains the opts dictionary = {opt1:val1, opt2:val2, opt3:val3}; it also contains the numTraining variable as the ['numTraining'] key. As such it has the following structure. 
{opt1:val1,opt2:val2,opt3:val3, numTraining:int}.\n args['pacman'] = pacman ## This passes the instanciated object to the agent dictionary containing the pacman key.\n\n # Don't display training games\n if 'numTrain' in agentOpts: ## Checks whether the user has determined a certain number of training games. If they did, the number is passed on as an int to the options.numQuiet and option.numIgnore variables.\n options.numQuiet = int(agentOpts['numTrain']) \n options.numIgnore = int(agentOpts['numTrain'])\n\n # Choose a ghost agent\n ghostType = loadAgent(options.ghost, noKeyboard) ## The options.ghost variable contains the user's ghost type preference as specified in the console.The user can choose between -g RandomGhost which is A ghost that chooses a legal action uniformly at random OR DirectionalGhost, a ghost that prefers to rush Pacman, or flee when scared.\n args['ghosts'] = [ghostType( i+1 ) for i in range( options.numGhosts )] #instanciates as many ghost agents as the player requested by entering the desired number as -k', '--numghosts'in the console.\n\n # Choose a display format ##contains whether the game output is displayed as minimal output and no graphics (-q) text only (-t) or via graphicsDiplay (standard)\n if options.quietGraphics: \n import textDisplay\n args['display'] = textDisplay.NullGraphics()\n elif options.textGraphics:\n import textDisplay\n textDisplay.SLEEP_TIME = options.frameTime\n args['display'] = textDisplay.PacmanGraphics()\n else:\n import graphicsDisplay ## This refers to the module that is responsible for the graphical representation of the game.\n args['display'] = graphicsDisplay.PacmanGraphics(options.zoom, frameTime = options.frameTime) ## This line instanciates the PacmanGraphics class from the graphicsDisplay module and passes the reference to the args['display'] dictionary.\n args['numGames'] = options.numGames \n args['record'] = options.record\n args['catchExceptions'] = options.catchExceptions\n args['timeout'] = options.timeout\n\n # Special case: recorded games don't use the runGames method or args structure\n if options.gameToReplay != None:\n print 'Replaying recorded game %s.' 
% options.gameToReplay \n import cPickle\n f = open(options.gameToReplay)\n try: recorded = cPickle.load(f)\n finally: f.close()\n recorded['display'] = args['display']\n replayGame(**recorded)\n sys.exit(0)\n\n return args # returns the args dictionary, which contains:\n ##args['pacman'] - the instantiated Pacman agent, constructed with the agentOpts dictionary {opt1: val1, opt2: val2, opt3: val3, numTraining: int}\n ##args['layout'] - the layout object that was created by the layout class via the getLayout function.\n ##args['numTraining'] - designates how many games are training games, which means that the output remains suppressed\n ##args['ghosts'] - contains the instantiated ghost agents in line with the number the user specified\n ##args['display'] - records whether the game output is displayed as minimal output and no graphics (-q), text only (-t) or via graphicsDisplay (standard)\n ##args['numGames'] - the number of GAMES to play\n ##args['record'] - Writes game histories to a file (named by the time they were played)\n ##args['catchExceptions'] = options.catchExceptions - Turns on exception handling and timeouts during games\n ##args['timeout'] = options.timeout - Maximum length of time an agent can spend computing in a single game", "def __init__(self):\n\n self._options = docopt(__doc__, version=__version__)\n self._arguments = {\n k: v for k, v in self._options.items()\n if not isinstance(v, bool)\n }\n\n commands_json = json.loads(read_file(COMMANDS_JSON))\n command = list(filter(lambda x: self._is_command(x[\"Conditions\"]), commands_json))[0]\n\n getattr(\n import_module(\"qasm.commands.{0}\".format(command[\"Module Identifier\"])),\n command[\"Class Identifier\"]\n )(self._arguments).run()", "def _run_command(self, opts, args):\r\n cmd = self.search_commands(args[0])\r\n\r\n if opts.debug:\r\n LOGGER.setLevel(logging.DEBUG)\r\n LERR.setLevel(logging.DEBUG)\r\n\r\n if not (opts.nologo or cmd.nologo) and not self.interactive:\r\n sys.stdout.write(FIPSSTR)\r\n CLI.version(self._progname, versioning.__version__,\\\r\n versioning.__extracontent__, fileh=sys.stdout)\r\n if len(args) > 1:\r\n return cmd.run(args[1:])\r\n\r\n return cmd.run([])", "def main():\n opt = parse_opts()\n run(opt)", "def main():\n opt = parse_opts()\n run(opt)", "def run(self):\n if self.subcommand_parser is None:\n self.exit(1, 'Command defines no subcommands')\n\n args = self.parse_args()\n if args.command is None:\n self.exit(1, 'No command selected')", "def _run_as_argument(self):\n self.command = self.command_parser.run(self.args)\n if self.command:\n if self.command[0] == \"exit\":\n return\n self.answer = self.communication(self.command)\n self.print_answer(self.answer)\n \n self.command.clear()", "def execute(self, command: LoadCommand):\n command.execute()", "def execute(self):\n # Preprocess options to extract --settings and --pythonpath.\n # These options could affect the commands that are available, so they\n # must be processed early.\n parser = LaxOptionParser(usage=\"%prog subcommand [options] [args]\",\n version=magpy.get_version(),\n option_list=BaseCommand.option_list)\n self.autocomplete()\n try:\n options, args = parser.parse_args(self.argv)\n handle_default_options(options)\n except:\n # Ignore any option errors at this point.\n pass # pylint: disable-msg=W0702\n\n try:\n subcommand = self.argv[1]\n except IndexError:\n subcommand = 'help' # Display help if no arguments were given.\n\n if subcommand ==
'help':\n if len(args) <= 2:\n parser.print_lax_help()\n sys.stdout.write(self.main_help_text() + '\\n')\n elif args[2] == '--commands':\n sys.stdout.write(\n self.main_help_text(commands_only=True) + '\\n')\n else:\n self.fetch_command(args[2]).print_help(self.prog_name, args[2])\n elif subcommand == 'version':\n sys.stdout.write(parser.get_version() + '\\n')\n # Special-cases: We want 'django-admin.py --version' and\n # 'django-admin.py --help' to work, for backwards compatibility.\n elif self.argv[1:] == ['--version']:\n # LaxOptionParser already takes care of printing the version.\n pass\n elif self.argv[1:] in (['--help'], ['-h']):\n parser.print_lax_help()\n sys.stdout.write(self.main_help_text() + '\\n')\n else:\n self.fetch_command(subcommand).run_from_argv(self.argv)", "def main():\n args = parse_args()\n process_args(args)", "def run_from_argv(self, argv):\n self._called_from_command_line = True\n parser = self.create_parser(argv[0], argv[1])\n options = parser.parse_args(argv[2:])\n cmd_options = vars(options)\n args = cmd_options.pop('args', ())\n self.execute(*args, **cmd_options)", "def run() -> None:\n arguments = sys.argv[1:]\n\n if not arguments:\n help_command(arguments=[])\n return\n\n commands_dict = {\n '--help': help_command,\n 'list': list_command,\n 'create': create_command,\n 'update': update_command,\n 'download': download_command,\n 'delete': delete_command,\n }\n\n command = arguments[0]\n command_handler = commands_dict.get(command)\n if command_handler is not None:\n command_handler(arguments)\n else:\n print(\"Can't perform {0} command. Please read help:\".format(command))\n help_command(arguments=[])", "def main_function():\n argument_parser, arguments = parse_arguments()\n \n load_config(arguments.config)\n \n if not execute_command(arguments):\n argument_parser.print_help()", "def query_cmdline():", "def run_from_args(command):\n return Effect(Run.from_args(command))", "def cli(self, args=None):\n parser = self.get_parser()\n parser.add_argument('--params', nargs='+', default=[])\n ns, opts, poss = parseargs(parser, args)\n if ns.help_full:\n print_full_help(self)\n sys.exit()\n if ns.params:\n setnestedattr(self, deepmixdicts(*map(load_any, ns.params)))\n process_assignment_options(self, opts)\n self.execute(*poss)\n return self", "def handle_command_line():\n commands = scan_for_commands()\n parser = argparse.ArgumentParser(\n description=\"A set of utilities to ease the installation of Modoboa.\",\n epilog=\"\"\"Available commands:\n%s\n\"\"\" % \"\\n\".join([\"\\t%s\" % c for c in sorted(commands)]))\n parser.add_argument(\"--verbose\", action=\"store_true\",\n help=\"Activate verbose output\")\n parser.add_argument(\"command\", type=str,\n help=\"A valid command name\")\n (args, remaining) = parser.parse_known_args()\n\n if args.command not in commands:\n print(\"Unknown command '%s'\" % args.command, file=sys.stderr)\n sys.exit(1)\n\n commands[args.command](commands, verbose=args.verbose).run(remaining)", "def cli():\n config, auth, execute_now = read_command_line_arguments()\n main(config, auth, execute_now)", "def run ( self ) :\n exec self._cmd in self._myglobals,self._mylocals", "def run(self):\n\n input_args = {}\n self._execute(input_args, self.args)", "def main(self):\n cmd, path, args = self._parse_args()\n if cmd == \"shell\":\n print \"You are now in ubs shell.\"\n print \"Use \\\"python %s help\\\" to see other choice.\" % sys.argv[0]\n self.shell()\n elif cmd == \"help\":\n self.print_path_help(path)\n sys.exit(0) \n elif cmd == 
\"run\":\n self.route(path, args)\n else:\n raise Exception(\"unknown CMD %s\" % cmd)", "def read_cmd(self):\n\n parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)\n req_opts = parser.add_argument_group(\"Required Options\")\n req_opts.add_argument(\"--instance_dir\", required=True,\n help=\"directory with instances (not recursive\")\n \n opt_opts = parser.add_argument_group(\"Optional Options\")\n \n opt_opts.add_argument(\"--fn_suffix\", default=\".*\",\n help=\"suffix of instance file names\")\n opt_opts.add_argument(\"--cutoff\", default=10, type=int,\n help=\"running time cutoff [sec]\")\n opt_opts.add_argument(\"--memlimit\", default=2048, type=int,\n help=\"memory limit\")\n opt_opts.add_argument(\"--ac_budget\", default=360,\n help=\"configuration budget [sec]\")\n opt_opts.add_argument(\"--run_obj\", default=\"runtime\",\n choices=[\"runtime\", \"quality\"],\n help=\"run objective\")\n opt_opts.add_argument(\"--par-factor\", default=10,\n help=\"Factor by which to penalize unsolved instances. Usage may differ based on TAE used.\")\n\n opt_opts.add_argument(\"--binary\", default=\"clingo\",\n help=\"target binary\")\n opt_opts.add_argument(\"--pcs_file\", default=\"pcs/all_params.pcs\",\n help=\"parameter configuration file\")\n opt_opts.add_argument(\"--runsolver\", default=\"binaries/runsolver\",\n help=\"runsolver binary\")\n opt_opts.add_argument(\"--tae_class\", default=None,\n help=\"TAE class to individualize clingo calls -- has to inherit from smac.tae.execute_ta_run_aclib.ExecuteTARunAClib\")\n\n\n opt_opts.add_argument(\"--seed\", default=12345, type=int,\n help=\"random seed\")\n opt_opts.add_argument(\"--verbose_level\", default=logging.INFO,\n choices=[\"INFO\", \"DEBUG\"],\n help=\"random seed\")\n opt_opts.add_argument(\"--tae_args\", default=\"{}\",\n help=\"Miscellaneous options for the TAE\")\n \n\n args_, misc = parser.parse_known_args()\n self._check_args(args_)\n args_.tae_args=json.loads(args_.tae_args)\n\n # remove leading '-' in option names\n misc = dict((k.lstrip(\"-\"), v.strip(\"'\"))\n for k, v in zip(misc[::2], misc[1::2]))\n\n misc[\"instances\"] = self._find_files(dir_=args_.instance_dir, suffix_=args_.fn_suffix)\n misc[\"wallclock_limit\"] = args_.ac_budget\n misc[\"cutoff_time\"] = args_.cutoff\n misc[\"paramfile\"] = args_.pcs_file\n misc[\"algo\"] = \"\"\n misc[\"run_obj\"] = args_.run_obj\n\n return args_, misc", "def main(self):\n cmd = \"self.%s(sys.stdin)\" % sys.argv[1]\n exec(cmd)", "def execute(self, args):\r\n pass", "def do_con_read(self, *args):\n with suppress(SystemExit):\n command = self.cli.con_parser.parse_args(args)\n command.func(**vars(command))", "def run_from_argv(self, argv):\r\n self.progname = argv[0]\r\n super(Command, self).run_from_argv(argv)", "def _process_command(self, **kwargs):\n return self.run_command(**kwargs)", "def run_from_argv(self, argv):\n parser = self.create_parser(argv[0], argv[1])\n options, args = parser.parse_args(argv[2:])\n handle_default_options(options)\n self.execute(*args, **options.__dict__) # pylint: disable-msg=W0142", "def do_command(): # pragma: no cover\n args = parse_args(sys.argv[1:])\n status = run(args)\n sys.exit(status)", "def run(self, commands: list[str]):\n ...", "async def _run_command(self, command, *args, **kwargs):\n pass", "def main():\n file_requested = obtain_filename()\n process_command(file_requested)", "def read_command( argv ):\n from optparse import OptionParser\n usageStr = \"\"\"\n USAGE: python tortoise.py <options>\n EXAMPLES: python tortoise.py 
--agent ReflexBrain\n OR python tortoise.py -a ReflexBrain\n - run tortoise with the reflex agent\n \"\"\"\n parser = OptionParser(usageStr)\n \n parser.add_option('-a', '--agent', dest = 'agent',\n help = default('The agent to use'),\n metavar = 'TYPE', default = 'ReflexBrain')\n parser.add_option('-w', '--width', dest = 'width',\n help = default('World width'), default = 15)\n parser.add_option('-s', '--speed', dest = 'speed',\n help = default('Speed'), default = 40)\n parser.add_option('-r', '--random-seed', dest = 'random_seed',\n help = default('Random'), default = -1)\n \n options, otherjunk = parser.parse_args(argv)\n\n if len(otherjunk) != 0:\n raise Exception('Command line input not understood: ' + str(otherjunk))\n args = dict()\n \n # Choose a Tortoise solver\n try:\n module = __import__('agents')\n if options.agent in dir(module):\n agent = getattr(module, options.agent)\n args['agent'] = agent()\n else:\n raise Exception('Unknown agent: ' + options.agent)\n except ImportError:\n raise Exception('No file agents.py')\n \n args['width'] = int(options.width)\n args['speed'] = int(options.speed)\n args['random_seed'] = int(options.random_seed)\n return args", "def execute(self):\n\n options, args = self.parser.parse_args(self.argv)\n\n try:\n subcommand_name = self.argv[1]\n except IndexError:\n subcommand_name = 'help'\n\n if subcommand_name == 'help':\n if len(args) <= 2:\n self.print_help()\n else:\n self.fetch_subcommand(self.argv[2]).print_help()\n elif subcommand_name == 'version':\n self.print_version()\n else:\n self.fetch_subcommand(subcommand_name).execute()", "def main(text):\n _register_plugins()\n\n arguments, command_arguments = parse_arguments(text)\n\n if arguments.verbose:\n _LOGGER.setLevel(logging.DEBUG)\n\n arguments.execute(arguments, command_arguments)", "def main(self):\n try:\n config_provider = ConfigurationProvider(MaskReplacer(), DirectorySelector())\n parser = CommandParser(\n config_provider,\n RubyRipperCdRipper(config_provider),\n DiscogsMetadataService(),\n GenreSelector())\n commands = parser.from_args(self._get_arguments())\n for command in commands:\n command.validate()\n command.execute()\n return 0\n except Exception as ex:\n # This will be replaced with proper logging output.\n sys.stderr.write('{0}\\n'.format(ex.message))\n sys.stderr.write('{0}\\n'.format(traceback.format_exc()))\n return 255", "def _run_command(args):\n subprocess.run(args, check=True)", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli() -> None:\r\n config_argparse = _configfile_parser()\r\n config_args, _ = config_argparse.parse_known_args()\r\n\r\n defaults = {}\r\n\r\n if config_args.config: \r\n defaults = _load_config(config_args)\r\n\r\n parser = _cli(config_argparse, defaults)\r\n _add_standard_args(parser) \r\n \r\n subparser = parser.add_subparsers()\r\n _add_create_command(subparser)\r\n _add_update_command(subparser) \r\n\r\n args = parser.parse_args()\r\n command = args.cmd\r\n command.execute(args)", "def _main():\n try:\n parser, options, arguments = _parse_options()\n env.update(vars(options))\n\n arguments = parser.largs\n\n if not (arguments):\n parser.print_help()\n sys.exit(0)\n\n commands_to_run = 
_parse_arguments(arguments)\n\n for name, args, kwargs in commands_to_run:\n try:\n func = globals()[name]\n except KeyError:\n sys.stderr.write('Command %s does not exist' % name)\n sys.exit(1)\n\n func(*args, **kwargs)\n except SystemExit:\n raise\n except KeyboardInterrupt:\n sys.stdout.write('\\nQuit\\n')\n\n sys.exit(0)", "def do_command(self, args):\n chk_arg_count(args, 0)\n dbops.init_database()", "def execute_tool(description, *args):\n command_line = list(args) + files_and_directories\n click.echo(f\"{description}: {' '.join(command_line)}\")\n rv = call(command_line)\n if rv != 0:\n exit(rv)", "def launch(**kwargs):\n logger.info('launch dream command')\n launch_gui()", "def run(self, args):\n pass", "def _runner(self, classpath, main, jvm_options, args):", "def start(self):\n self.start_time = dt.datetime.now()\n self.call = ' '.join(sys.argv)\n self.commands = []", "def run_from_argv(self, argv):\n self._called_from_command_line = True\n index = 2\n options = type('', (), {'traceback': False})()\n try:\n while index < len(argv) and argv[index].startswith('-'):\n index += 1\n if index < len(argv):\n #subcommand = self.get_subcommand(argv[index])\n subargv = argv[:2] + argv[index+1:]\n subargv[1] = subargv[1] + '.' + argv[index]\n argv = argv[:index+1]\n else:\n subargv = argv[:2]\n\n parser = self.create_parser(argv[0], argv[1])\n options = parser.parse_args(argv[2:])\n cmd_options = vars(options)\n args = cmd_options.pop('args', ())\n handle_default_options(options)\n\n #cmd_options['__command'] = subcommand\n cmd_options['__argv'] = subargv\n\n self.execute(*args, **cmd_options)\n #subcommand.run_from_argv(subargv)\n except Exception as e:\n if options.traceback or not isinstance(e, CommandError):\n raise\n\n # SystemCheckError takes care of its own formatting.\n if isinstance(e, SystemCheckError):\n self.stderr.write(str(e), lambda x: x)\n else:\n self.stderr.write('%s: %s' % (e.__class__.__name__, e))\n sys.exit(1)\n finally:\n try:\n connections.close_all()\n except ImproperlyConfigured:\n # Ignore if connections aren't setup at this point (e.g. 
no\n # configured settings).\n pass", "def main(args):\n cli = CLI()\n # Check arguments\n cli.parse_arguments(args)", "def _run_args(cls, args: Optional[List[str]] = None):\n parser = cls.setup_args()\n opt = parser.parse_args(args=args)\n return cls._run_from_parser_and_opt(opt, parser)", "def Run(args):\n\n args = args[list(args.keys())[0]]\n\n oobj = Syntax([\n Template(\"FILE\", subc=\"\", ktype=\"literal\", var=\"projfile\"),\n Template(\"PASSWORD\", subc=\"\", ktype=\"literal\", var=\"password\"),\n Template(\"STARTUP\", subc=\"\", ktype=\"str\", var=\"startup\",\n vallist=[\"asis\", \"set\", \"delete\"]),\n \n Template(\"HELP\", subc=\"\", ktype=\"bool\")])\n \n #enable localization\n global _\n try:\n _(\"---\")\n except:\n def _(msg):\n return msg\n # A HELP subcommand overrides all else\n if \"HELP\" in args:\n #print helptext\n helper()\n else:\n processcmd(oobj, args, doproj)", "def run_command(self, prog_name, *args):\n self._run_command(prog_name, list(args))", "def work(self):\n\n cmd = self.options.command\n cmdargs = self.options.args\n\n # find function\n fname = \"cmd_\" + cmd.replace('-', '_')\n if not hasattr(self, fname):\n self.log.error('bad subcommand, see --help for usage')\n sys.exit(1)\n fn = getattr(self, fname)\n\n b = inspect.signature(fn).bind(*cmdargs)\n\n fn(*b.args, **b.kwargs)", "def _handle_command(args):\n options = (\n ('url', 'Enter the Quality Center URL (e.g., '\n '\"http://localhost/qcbin\"):'),\n ('domain', 'Enter the Quality Center Domain:'),\n ('project', 'Enter the Quality Center Project:'),\n ('username', 'Enter your Quality Center username:'),\n ('source', (r'Enter the path to the test results '\n r'(e.g., c:\\testing\\resultparsers\\output.xml\"):')),\n ('destination', ('Enter the destination path in Quality Center '\n '(e.g., \"UAT/my folder/subfolder\"):')),\n ('attach_report', 'Attach report? 
(yes/no)')\n )\n cfg = importer.load_config()\n if not args.console and not any((getattr(args, opt[0]) for opt in options)):\n rr = gui.QcriGui(cfg)\n rr.mainloop()\n return\n use_history = cfg.getboolean('main', 'history')\n hist = importer.load_history() if use_history else None\n try:\n for opt in options:\n _set_argument(args, opt, hist)\n if not args.password:\n args.password = getpass.getpass()\n except KeyboardInterrupt:\n return\n if use_history:\n importer.save_history(hist)\n parser = _get_parser(args.source, cfg)\n if parser is None:\n LOG.error('parser not found for source: %s', args.source)\n return\n results = importer.parse_results(parser, args.source, cfg)\n # get a Quality Center connection\n qcc = None\n try:\n qcc = qualitycenter.connect(\n args.url,\n args.domain,\n args.project,\n args.username,\n args.password)\n importer.import_results(\n qcc,\n args.destination,\n results,\n strtobool(args.attach_report))\n except pythoncom.com_error as e:\n LOG.exception(e)\n finally:\n qualitycenter.disconnect(qcc)\n print('Import complete.')", "def main():\n\n # Load all the commands\n commands = {}\n for entry_point in pkg_resources.iter_entry_points('nagare.commands'):\n try:\n commands[entry_point.name] = entry_point.load()\n except ImportError:\n print \"Warning: the command '%s' can't be imported\" % entry_point.name\n raise\n\n parser = optparse.OptionParser(usage='\\n'.join(usage(commands)))\n\n if (len(sys.argv) == 1) or (sys.argv[1] == '-h') or (sys.argv[1] == '--help'):\n parser.print_usage(sys.stderr)\n parser.exit()\n\n command_name = sys.argv[1]\n command = commands.get(command_name)\n if command is None:\n parser.error(\"command '%s' not found\" % command_name)\n\n parser.usage = '%%prog %s [options]' % command_name\n\n argv = command.set_options(parser) # Let the command register its command line options\n (options, args) = parser.parse_args((argv if argv is not None else sys.argv)[2:]) # Parse the command line\n\n command.run(parser, options, args) # Run the command", "def run(self, argv):\r\n try:\r\n index = 0\r\n command_pos = -1\r\n help_pos = -1\r\n help_command_pos = -1\r\n for arg in argv:\r\n if arg == 'bash-completion':\r\n self._bash_completion()\r\n return 0\r\n if arg in self.commands[self.api_version]:\r\n if command_pos == -1:\r\n command_pos = index\r\n elif arg in ('-h', '--help'):\r\n if help_pos == -1:\r\n help_pos = index\r\n elif arg == 'help':\r\n if help_command_pos == -1:\r\n help_command_pos = index\r\n index = index + 1\r\n if command_pos > -1 and help_pos > command_pos:\r\n argv = ['help', argv[command_pos]]\r\n if help_command_pos > -1 and command_pos == -1:\r\n argv[help_command_pos] = '--help'\r\n self.options, remainder = self.parser.parse_known_args(argv)\r\n self.configure_logging()\r\n self.interactive_mode = not remainder\r\n self.initialize_app(remainder)\r\n except Exception as err:\r\n if self.options.verbose_level == self.DEBUG_LEVEL:\r\n self.log.exception(unicode(err))\r\n raise\r\n else:\r\n self.log.error(unicode(err))\r\n return 1\r\n result = 1\r\n if self.interactive_mode:\r\n _argv = [sys.argv[0]]\r\n sys.argv = _argv\r\n result = self.interact()\r\n else:\r\n result = self.run_subcommand(remainder)\r\n return result", "def Run(self):\n\n # Execution on the master\n if self.cmd == 'MASTER':\n self.PerformMirroredUpgrade()\n \n # The rest of the options are executed on remote hosts\n elif self.cmd == 'CHKDOWN':\n self.CheckDown()\n elif self.cmd == 'SETCATVERSION':\n self.SetCatVersion((str(self.option)==str(True)))\n 
else:\n raise Exception('Unknown cmd: ' + str(self.cmd))\n \n if self.pool:\n t = self.pool\n self.pool = None\n del t", "def exec_from_command_line(argv: List[str]) -> None:\n\n if len(argv) == 1:\n raise Exception(\"Not enough arguments.\")\n else:\n m = Management(argv[1:])\n m.execute()", "def run_from_argv(self, argv):\n self._called_from_command_line = True\n parser = self.create_parser(argv[0], argv[1])\n options = parser.parse_args(argv[2:])\n cmd_options = vars(options)\n # Move positional args out of options to mimic legacy optparse\n args = cmd_options.pop('args', ())\n self.check()\n try:\n self.execute(*args, **cmd_options)\n except Exception as e:\n print(traceback.format_exc())\n self.stderr.write('%s: %s' % (e.__class__.__name__, e))\n\n sys.exit(1)", "def _command(self, *cmd, handler=None):", "def load_cli():\n args=IO()\n\n if(args.command is None):\n logging.error('Please provide the appropriate input. Enter \"python -m packman -h\" for more details.')\n exit()\n\n logging.basicConfig(stream=args.logfile)\n\n if(args.pdbid is not None):\n molecule.download_structure(args.pdbid, save_name=args.filename.split('.')[0], ftype=args.filename.split('.')[1])\n\n try:\n extension = args.filename.split('.')[-1]\n mol = molecule.load_structure(args.filename,ftype=extension)\n except:\n logging.warning(\"The filename provided does not appear to have a format extension.\")\n mol = molecule.load_structure(args.filename)\n \n if(args.command == 'hinge'):\n hinge_cli(args,mol)\n elif(args.command == 'hdanm'):\n hdanm_cli(args,mol)\n elif(args.command == 'entropy'):\n entropy_cli(args,mol)\n elif(args.command == 'dci'):\n dci_cli(args,mol)\n\n return True", "def run(self, args=[], path=DEFAULT_DMENU_PATH):\n options = self.create_options()\n option = dmenu(options, args=args, path=path)\n if option:\n self.handle_option(option, options)", "def _callOnCommandLine(self, cmd=[]):\n\t\tp = Popen(cmd, stdout=PIPE, stderr=PIPE, stdin=PIPE)\n\t\tstdout, stderr = p.communicate()\n\t\treturn stdout, stderr", "def cmdline(self, args=()):\n cmds = [self._interpreter.binary]\n cmds.append(self._pex)\n cmds.extend(args)\n return cmds", "def runCommand(command):\n None", "def execute(self):\n try:\n subcommand = self.argv[1]\n except IndexError:\n subcommand = \"help\" # Display help if no arguments were given.\n\n # These options could affect the commands that are available, so they\n # must be processed early.\n parser = CommandParser(\n prog=self.prog_name,\n usage=\"%(prog)s subcommand [options] [args]\",\n add_help=False,\n allow_abbrev=False,\n )\n parser.add_argument(\"args\", nargs=\"*\") # catch-all\n try:\n options, args = parser.parse_known_args(self.argv[2:])\n except CommandError:\n pass # Ignore any option errors at this point.\n\n if subcommand == \"help\":\n if \"--commands\" in args:\n sys.stdout.write(self.main_help_text(commands_only=True) + \"\\n\")\n elif not options.args:\n sys.stdout.write(self.main_help_text() + \"\\n\")\n else:\n self.fetch_command(options.args[0]).print_help(\n self.prog_name, options.args[0]\n )\n elif subcommand == \"version\" or self.argv[1:] == [\"--version\"]:\n sys.stdout.write(get_named_version() + \"\\n\")\n elif self.argv[1:] in ([\"--help\"], [\"-h\"]):\n sys.stdout.write(self.main_help_text() + \"\\n\")\n else:\n self.fetch_command(subcommand).run_from_argv(self.argv)" ]
[ "0.6881134", "0.66955614", "0.6658523", "0.6462016", "0.64445746", "0.6421537", "0.6377791", "0.63629246", "0.6346535", "0.6292145", "0.62857026", "0.6277357", "0.62285894", "0.62285894", "0.6209417", "0.61959785", "0.61842215", "0.61753494", "0.61505806", "0.6132218", "0.6131749", "0.612376", "0.6104211", "0.60969377", "0.6071521", "0.60675937", "0.60643554", "0.606024", "0.60589004", "0.60570383", "0.6053771", "0.6053749", "0.60525876", "0.60522497", "0.6049699", "0.6033951", "0.60259855", "0.6023052", "0.6005731", "0.6001093", "0.5985765", "0.59750456", "0.5970467", "0.5969402", "0.5967306", "0.5961277", "0.5959264", "0.5959264", "0.5959264", "0.5959264", "0.5959264", "0.5959264", "0.5959264", "0.5959264", "0.5959264", "0.5959264", "0.5959264", "0.5959264", "0.5959264", "0.5959264", "0.5959264", "0.5959264", "0.5959264", "0.5959264", "0.5959264", "0.5959264", "0.5959264", "0.5959264", "0.5959264", "0.5959264", "0.5959264", "0.5959264", "0.5959264", "0.5959264", "0.5957996", "0.59531534", "0.59478813", "0.59334975", "0.59297305", "0.5929667", "0.591272", "0.59106386", "0.58823514", "0.5868053", "0.58676916", "0.58621734", "0.58616745", "0.58599967", "0.5859439", "0.58578557", "0.58497924", "0.5845287", "0.58435494", "0.5831426", "0.581669", "0.58143646", "0.5812818", "0.58077997", "0.58065516", "0.5801616", "0.5795667" ]
0.0
-1
Command-line application entry point.
def main():\n    try:\n        return run_sysca(sys.argv[1:])\n    except InvalidCertificate as ex:\n        die(str(ex))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def main(args=None):\n app()\n return 0", "def main():\n CLI_APP.run()", "def main():\n app = App()\n app.run()", "def main(args=None):", "def main(args=None):", "def main(args):", "def main(args):", "def cli():\n config, auth, execute_now = read_command_line_arguments()\n main(config, auth, execute_now)", "def main():\n print(\"Call your main application code here\")", "def main():\n print(\"Call your main application code here\")", "def main():\n print(\"Call your main application code here\")", "def main(argv=sys.argv):\n logging.basicConfig()\n exit_code = 1\n try:\n app = Application(argv)\n app.run()\n exit_code = 0\n except KeyboardInterrupt:\n exit_code = 0\n except Exception as exc:\n LOG.exception(exc)\n sys.exit(exit_code)", "def main():\n\tcli = Cli()\n\tcli.run()", "def main():\n app.run(debug=True)", "def main():\n Log.info('Installing...')\n app = Application()\n app.run()\n Log.info(\"Done successfully.\")", "def main():\n import sys\n FILES.extend(sys.argv[1:])\n app.debug = True\n app.run(port=5001, threaded=False)", "def main(args=None):\n pass", "def main(config: str):\n application = Application(config_path=config)\n application.run()", "def main():\n return", "def main():\n LOGGER.info('Loading Application')\n main_app = Application()\n parser = argparse.ArgumentParser()\n parser.add_argument(\"-c\", \"--console\", help=\"Command Line Mode\", action=\"store_true\")\n args = parser.parse_args()\n if args.console:\n LOGGER.info('Command Line Mode')\n main_app.run()\n else:\n main_app.controller.gui_enabled = True\n try:\n import gui\n except ModuleNotFoundError:\n from herdcl import gui\n app = gui.MainUI()\n LOGGER.info('Opening GUI')\n app.mainloop()", "def entry_point():", "def entry_point():", "def entry_point():", "def main(ctx, verbose):\n return", "def main() -> None:\n return", "def main(self) -> None:\n pass", "def main():\n app = RunSnakeRunApp(0)\n app.MainLoop()", "def main():\n print(\"def main\")\n return APP.run()", "def main(args):\n cli = CLI()\n # Check arguments\n cli.parse_arguments(args)", "def main() -> None:", "def main() -> None:", "def main() -> None:", "def main() -> None:", "def main():\n pass", "def main():\n args = parse_args()\n process_args(args)", "def startapp():", "def entrypoint(cls):\n try:\n cls().run(sys.argv[1:])\n except KeyboardInterrupt:\n pass", "def run():\n main(sys.argv[1:])", "def run():\n main(sys.argv[1:])", "def run():\n main(sys.argv[1:])", "def run():\n main(sys.argv[1:])", "def run():\n main(sys.argv[1:])", "def run():\n main(sys.argv[1:])", "def run():\n main(sys.argv[1:])", "def run():\n main(sys.argv[1:])", "def main_cli():\n pass", "def cli():\n pass", "def entry_point() -> int:\n return run(argv=sys.argv[1:], stdout=sys.stdout, stderr=sys.stderr)", "def main():\n sys.exit(RBExt().run(sys.argv[1:]))", "def main() -> None:\n try:\n config = Config.load_config()\n asyncio.run(App(config=config, no_history=False).run())\n except ClientError:\n raise\n except Exception as e:\n raise Bug(str(e))", "def run_main():\n main(sys.argv)", "def main() -> None:\n try:\n # ServerManager expects cwd to be the server dir (containing\n # dist/, config.yaml, etc.)\n # Let's change our working directory to the location of this file\n # so we can run this script from anywhere and it'll work.\n os.chdir(os.path.abspath(os.path.dirname(__file__)))\n\n ServerManagerApp().run_interactive()\n except CleanError as exc:\n # For clean errors, do a simple print and fail; no tracebacks/etc.\n exc.pretty_print()\n sys.exit(1)", "def 
entry_point():\n\n\n plac.call(main)", "def cli(args): # noqa; pylint: disable=unused-argument", "def entry_point():\n raise SystemExit(main(sys.argv))", "def entry_point():\n raise SystemExit(main(sys.argv))", "def entry_point():\n raise SystemExit(main(sys.argv))", "def main():\n parser = argparse.ArgumentParser()\n # Set the default entrypoint for nothing.\n parser.set_defaults(func=lambda x: None)\n # Configure the CLI for this script.\n appsec_wtf.cli.exec_poc.set_cli_opts(parser)\n\n # Parse the CLI arguments.\n args = parser.parse_args()\n # Execute the entry point of the command being executed.\n args.func(args)", "def main():\n cli = CommandLineInterface(NAME, package=\"nemo_nowcast\", description=__doc__)\n cli.build_parser()\n parsed_args = cli.parser.parse_args()\n config = Config()\n config.load(parsed_args.config_file)\n msg = _configure_logging(config)\n logger.info(f\"running in process {os.getpid()}\")\n logger.info(f\"read config from {config.file}\")\n logger.info(msg)\n run(config)", "def main():\n ensure_not_root()\n config.setup()\n model.init_db()\n manager.run()", "def main():\n run_program()", "def main():\n parser = optparse.OptionParser(usage='%prog [options]', version=\"0.0.1\")\n parser.add_option('--settings', \\\n help='Python path to settings module. If this isn\\'t provided, the DJANGO_SETTINGS_MODULE enviroment variable will be used.')\n\n parser.add_option('-v', '--verbose', action='store_true', dest='verbose', \\\n default=False, help='Verbose output.')\n options = parser.parse_args()[0]\n if options.settings:\n os.environ[\"DJANGO_SETTINGS_MODULE\"] = options.settings\n else:\n os.environ[\"DJANGO_SETTINGS_MODULE\"] = \"settings\"\n\n probe_all()", "def run():\n\n call_args = sys.argv[1:]\n main(call_args)", "def main():\n print(\"is Running!\")", "def launch_cli() -> None:\n app.run(main, flags_parser=_parse_flags)", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def entry_point():\n pass", "def main():\n parser = argparse.ArgumentParser(\n description='A testbench for the Google Cloud C++ Client Library')\n parser.add_argument('--host', default='localhost',\n help='The listening port')\n parser.add_argument('--port', help='The listening port')\n # By default we do not turn on the debugging. This typically runs inside a\n # Docker image, with a uid that has not entry in /etc/passwd, and the\n # werkzeug debugger crashes in that environment (as it should probably).\n parser.add_argument('--debug', help='Use the WSGI debugger',\n default=False, action='store_true')\n arguments = parser.parse_args()\n\n # Compose the different WSGI applications.\n application = wsgi.DispatcherMiddleware(root, {\n '/httpbin': httpbin.app,\n GCS_HANDLER_PATH: gcs,\n UPLOAD_HANDLER_PATH: upload,\n })\n serving.run_simple(arguments.host, int(arguments.port), application,\n use_reloader=True, use_debugger=arguments.debug,\n use_evalex=True)", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():" ]
[ "0.83380365", "0.81912965", "0.78934926", "0.77137196", "0.77137196", "0.766038", "0.766038", "0.75786704", "0.7566778", "0.7566778", "0.7566778", "0.7561944", "0.7548935", "0.75437146", "0.752279", "0.7502812", "0.74949706", "0.74422157", "0.74301237", "0.7409354", "0.7397369", "0.7397369", "0.7397369", "0.7395466", "0.738338", "0.7365718", "0.7331219", "0.73061156", "0.72669864", "0.7247295", "0.7247295", "0.7247295", "0.7247295", "0.721006", "0.7198366", "0.7195569", "0.7181712", "0.71621794", "0.71621794", "0.71621794", "0.71621794", "0.71621794", "0.71621794", "0.71621794", "0.71621794", "0.71575284", "0.71454036", "0.7143922", "0.71375376", "0.7116588", "0.7111315", "0.7106651", "0.7102649", "0.7075355", "0.7019573", "0.7019573", "0.7019573", "0.70158106", "0.69923514", "0.69920826", "0.6986964", "0.6985778", "0.6971962", "0.6971187", "0.69613564", "0.6947633", "0.6947633", "0.6947633", "0.6947633", "0.6947633", "0.6947633", "0.6947633", "0.6947633", "0.6947633", "0.6947633", "0.6947633", "0.6947633", "0.6947633", "0.6947633", "0.6947633", "0.6947633", "0.6947633", "0.6947633", "0.6947633", "0.6947633", "0.6947633", "0.6947633", "0.6946629", "0.69441277", "0.6930272", "0.6930272", "0.6930272", "0.6930272", "0.6930272", "0.6930272", "0.6930272", "0.6930272", "0.6930272", "0.6930272", "0.6930272", "0.6930272" ]
0.0
-1
resets counters to 0
def reset(self):\n    self.correct_count = 0\n    self.total_count = 0
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def reset_counter(self) -> None:", "def reset(self):\n self.counter = 0", "def reset (self):\n self.counter = 0", "def resetCounters(self):\n self.chain.zero_counters()\n counters = self.session.query(Counter).all()\n self.session.query(Counter).delete()", "def reset_count(self):\n self.count = 0", "def reset(self):\n for counterKey in self.counters.keys():\n self.counters[counterKey]=0\n self.title=None # 025 This is a hack of a hack. Trying to find if the counter was reset recently.", "def reset(self):\n for i in range(0, len(self.__counts)):\n self.__counts[i] = 0\n self.__overflow = 0\n self.__total_count = 0\n self.__total_values = 0\n self.__min = None\n self.__max = None", "def reset(self):\n self.sum_metric = 0.\n self.num_inst = 0.\n self.metrics.reset_stats()", "def reset(self) -> None:\n self.current = 0\n self.num_cuts = 0", "def reset_counter(self):\n self.counter = 0\n self.highlight_input()", "def reset_counter(self) -> None:\n self._fail_counter = 0", "def reset(self):\n self.avg = 0\n self.sum = 0\n self.cnt = 0", "def clear(self):\n #for counterName in self.counters:\n # del self.counters[counterName]\n self.counters={}\n self.title=None", "def clear(self):\n self.counts = [{} for _ in range(len(self.counts))]", "def _reset_count(self):\n self._triple_count = 0\n self._error_count = 0\n self._ignored_count = 0", "def reset(self):\n self.reset_count += 1\n self._init_data()", "def reset(self):\n self.total_pulls = 0\n self.total_score = 0\n self.npulls = np.zeros(self.k)\n self.score = np.zeros(self.k)", "def reset(self):\n self.val = 0\n self.avg = 0\n self.sum = 0\n self.count = 0", "def reset(self):\n self.loss = 0\n self.cnt = 0", "def reset(self):\n self.liidx = 0\n self.clidx = 0", "def reset(self):\n self.test = 0\n self.hit = 0", "def reset(self):\n self.test = 0\n self.hit = 0", "def reset(self):\n self.test = 0\n self.hit = 0", "def reset_axis_counters(self):\n\n self.column_counter = 0\n self.row_counter = 0", "def reset(self):\n self.tot = 0\n self.cnt = [0.0 for _ in range( self.alpha.getLen() )]", "def reset_index(self):\n self.increments = 0", "def reset(self):\n self._total_value = 0.0\n self._count = 0", "def reset(self):\n self._accumulated_time.clear()\n self._hit_count.clear()", "def reset(self) -> None:\n self.statistics = defaultdict(int)", "def reset(self):\n self.num_inst = 0\n self.sum_metric = 0.0", "def reset(self):\n self._idx = 0", "def resetCount(self):\n self.currentIndex = 0\n self.updateCurrentCommand()", "def reset(self) -> None:\n self.true_positives = 0\n self.all_positives = 0", "def reset(self):\n self.count = 0\n self.soft = False\n self.can_double = True\n self.can_split = False\n self.first_card = 0", "def clear():\n\t\tModel.counter = 0", "def reset_counter(self):\n if hasattr(self._id_generator, \"reset_counter\"):\n self._id_generator.reset_counter()", "def reset(self):\n self.c_count = 0\n self.a_count = -1\n self.epsilon = self.init_epsilon", "def clear(self):\n self.counts = [0] * len(self.values)\n if HAS_NUMPY:\n self.counts = numpy.array(self.counts)", "def reset(self):\n self._current_index = 0", "def reset(self):\n self.start_times = {}\n self.stats = defaultdict(OnlineMeter) # float defaults to 0", "def reset(self) -> None:\n self.counterpoint = self.counterpoint[0:1]\n self.__initialize_piano_roll()\n self.__set_defaults_to_runtime_variables()", "def reset(self):\n self.complete_misses = 0\n return", "def reset(self):\r\n\t\tself.index = 0", "def reset(self):\n self.stats = {}", "def stats_reset(self):\n 
self.stats.reset()", "def stats_reset(self):\n self.stats.reset()", "def reset(self) -> None:\n self.true_positives = 0\n self.actual_positives = 0", "def reset(self):\n self.damage_dealt = 0\n self.kills = 0\n self.got_killed = False\n self.fitness = 0", "def reset(self):\n self.table[:, :] = 0\n self.counts[:] = 0\n self.names = []\n self.hashesperid.resize(0)\n self.dirty = True", "def reset():", "def reset():", "def reset():", "def reset(self) -> List[int]:", "def reset(self):\n self.accumulation = None", "def reset(self):\n self.accumulation = None", "def reset(self):\n self._lastRoll = None\n self._initialSum = 0\n self._rollCount = 0", "def reset(self):\n self.test = 0\n self.pos = 0", "def reset(self):\n self.test = 0\n self.pos = 0", "def reset(self):\n self._open_activity_count = 0\n self._decisions = []\n self._tasks = TaskRegistry()", "def reset(self):\n self.restart()\n self.cycles = 0", "def reset(self):\r\n self.buffer = np.zeros(self.nBins)\r\n self.counter = 0", "def reset(self):\n self.algo_state = {}\n self.actual_repetitions = 0\n self.next_session = -1\n self.last_session = -1\n self.past_quality = []", "def reset(self):\n for i in range(0, len(self.current_state)):\n self.current_state[i] = 0\n\n for i in range(0, len(self.weights)):\n self.weights[i] = 0", "def testCounter():\n c = Counter()\n print(\"Expect 0: \", c)\n for i in range(5):\n c.increment()\n print(\"Expect 5: \", c)\n c.reset()\n print(\"Expect 0: \", c)", "def reset():\n global counter, total_attempts, successful_stops\n timer.stop()\n counter = 0\n total_attempts = 0\n successful_stops = 0", "def resetOperationCount():\n global _operationCount\n _countLock.acquire()\n try:\n _operationCount = 0\n finally:\n _countLock.release()", "def reset(self) -> None:\n self.statistics = defaultdict(float)", "def reset(self):\n self.cumtime = 0\n self.start_time = self.time()", "def reset(self):\n self.last_round = False\n self.last_player = None\n self.scores = [0] * self.num_players\n self.current_player = 0\n self.turn = 0\n self.roll = None", "def reset(self) -> List[int]:\n pass", "def reset(self):\n self.count = 0\n self.barrier.acquire()\n self.is_set = True", "def reset(self):\n self._value_estimates[:] = self.prior\n self.action_attempts[:] = 0\n self.last_action = None\n self.t = 0", "def reset_counters(cls):\n for field in cls.__fields__:\n if field.should_auto_increment:\n cls._reset_counter(*field.get_counter({}))", "def reset_stats() -> None:\n STATS[\"cleaned\"] = 0\n STATS[\"null\"] = 0\n STATS[\"unknown\"] = 0", "def reset(self):", "def reset(self):", "def reset(self):", "def reset(self):", "def __reset(self):\n\t\tself.__highest = -float('inf')\n\t\tself.__lowest = float('inf')\n\t\tself.__total = 0\n\t\tself.__steps = 0\n\t\tself.__cold_days = 0", "def clear(self):\n self.sum_hit_at_one = 0.0\n self.sum_perr = 0.0\n self.sum_loss = 0.0\n self.map_calculator.clear()\n self.global_ap_calculator.clear()\n self.num_examples = 0", "def reset(self):\n ...", "def reset(self):\n ...", "def reset_records(self):\n self.score_record = []\n self.score_window = deque(maxlen=100)", "def reset_state(self):\n for name in self.metrics:\n self.metrics[name].reset_state()", "def reset(self):\n self.__sets = []\n self._computed = False", "def resetWriteCount(self):\n self.writeCount = 0", "def reset(self):\n\t\tself.pos = self.start\n\n\t\tself.weighted_n_left = 0.0\n\t\tself.weighted_n_right = self.weighted_n_node_samples\n\n\t\tself.label_count_left \t= np.zeros(self.n_classes)\n\t\tself.label_count_right \t= 
np.copy(self.label_count_total)", "def reset(self):\n\n self._begin = 0\n self._end = 0\n self._size = 0", "def reset(self):\n self.memory.clear()\n self.relative_base = 0\n self.input_queue.clear()\n self.instr_idx = 0", "def reset(self) -> None:", "def reset(self) -> None:", "def reset(self) -> None:", "def reset(self):\n \n pass", "def reset(self):\r\n store = get_store()\r\n nbval = store.get('Nbtimecompound')[\"value\"]\r\n for i in range(1, nbval):\r\n self.del_line(1)", "def reset(self):\n self.sum = [0.] * len(self.topk)\n self.data_num = 0\n self.pfm = [0.] * len(self.topk)", "def reset():\r\n pass", "def reset(self):\n\t\tself._initial = None\n\t\tself._start = None\n\t\tself._time = 0\n\t\tself._total = 0\n\t\treturn self", "def reset(self):\n self.visited = False\n self.calculated = False\n self.past_value = self.value\n self.value = 0", "def reset(self):\n self.score = None\n self.true = None\n self.meta = None", "def reset(self):\n self.ref_value = 0.0\n self._average = 0.0\n self.num_samples = 0" ]
[ "0.8522219", "0.8301985", "0.8270119", "0.82612026", "0.81530696", "0.7962359", "0.7749801", "0.7585864", "0.75297916", "0.74603754", "0.7459489", "0.7426602", "0.7421175", "0.7398909", "0.73889565", "0.7378627", "0.7351837", "0.7351327", "0.7349136", "0.7317789", "0.73143846", "0.73143846", "0.73143846", "0.7309193", "0.7274728", "0.7274077", "0.72478443", "0.7247323", "0.72226703", "0.718841", "0.7181822", "0.71816057", "0.71736586", "0.71651375", "0.71567965", "0.71277523", "0.7099073", "0.7095955", "0.70699215", "0.7063472", "0.70532256", "0.7041536", "0.70040137", "0.70036745", "0.699846", "0.699846", "0.69877017", "0.6979502", "0.69794476", "0.69784313", "0.69784313", "0.69784313", "0.6971098", "0.6969234", "0.6969234", "0.69661105", "0.6958027", "0.6958027", "0.6937", "0.6924717", "0.6920337", "0.69155186", "0.69089454", "0.6887569", "0.688417", "0.68837166", "0.68694824", "0.6868667", "0.68643135", "0.6849264", "0.6846356", "0.68454283", "0.68231153", "0.68219864", "0.6769134", "0.6769134", "0.6769134", "0.6769134", "0.6757061", "0.67357075", "0.673024", "0.673024", "0.67199314", "0.67138", "0.67059475", "0.6691127", "0.6686006", "0.6685858", "0.66710925", "0.6668975", "0.6668975", "0.6668975", "0.66663545", "0.6665713", "0.6652416", "0.6646904", "0.66296774", "0.66126275", "0.6610137", "0.6606228" ]
0.75962293
7
resets counters to 0
def reset(self) -> None:\n    self.true_positives = 0\n    self.all_positives = 0
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def reset_counter(self) -> None:", "def reset(self):\n self.counter = 0", "def reset (self):\n self.counter = 0", "def resetCounters(self):\n self.chain.zero_counters()\n counters = self.session.query(Counter).all()\n self.session.query(Counter).delete()", "def reset_count(self):\n self.count = 0", "def reset(self):\n for counterKey in self.counters.keys():\n self.counters[counterKey]=0\n self.title=None # 025 This is a hack of a hack. Trying to find if the counter was reset recently.", "def reset(self):\n for i in range(0, len(self.__counts)):\n self.__counts[i] = 0\n self.__overflow = 0\n self.__total_count = 0\n self.__total_values = 0\n self.__min = None\n self.__max = None", "def reset(self):\n self.correct_count = 0\n self.total_count = 0", "def reset(self):\n self.sum_metric = 0.\n self.num_inst = 0.\n self.metrics.reset_stats()", "def reset(self) -> None:\n self.current = 0\n self.num_cuts = 0", "def reset_counter(self):\n self.counter = 0\n self.highlight_input()", "def reset_counter(self) -> None:\n self._fail_counter = 0", "def reset(self):\n self.avg = 0\n self.sum = 0\n self.cnt = 0", "def clear(self):\n #for counterName in self.counters:\n # del self.counters[counterName]\n self.counters={}\n self.title=None", "def clear(self):\n self.counts = [{} for _ in range(len(self.counts))]", "def _reset_count(self):\n self._triple_count = 0\n self._error_count = 0\n self._ignored_count = 0", "def reset(self):\n self.reset_count += 1\n self._init_data()", "def reset(self):\n self.total_pulls = 0\n self.total_score = 0\n self.npulls = np.zeros(self.k)\n self.score = np.zeros(self.k)", "def reset(self):\n self.val = 0\n self.avg = 0\n self.sum = 0\n self.count = 0", "def reset(self):\n self.loss = 0\n self.cnt = 0", "def reset(self):\n self.liidx = 0\n self.clidx = 0", "def reset(self):\n self.test = 0\n self.hit = 0", "def reset(self):\n self.test = 0\n self.hit = 0", "def reset(self):\n self.test = 0\n self.hit = 0", "def reset_axis_counters(self):\n\n self.column_counter = 0\n self.row_counter = 0", "def reset(self):\n self.tot = 0\n self.cnt = [0.0 for _ in range( self.alpha.getLen() )]", "def reset_index(self):\n self.increments = 0", "def reset(self):\n self._total_value = 0.0\n self._count = 0", "def reset(self):\n self._accumulated_time.clear()\n self._hit_count.clear()", "def reset(self) -> None:\n self.statistics = defaultdict(int)", "def reset(self):\n self.num_inst = 0\n self.sum_metric = 0.0", "def reset(self):\n self._idx = 0", "def resetCount(self):\n self.currentIndex = 0\n self.updateCurrentCommand()", "def reset(self):\n self.count = 0\n self.soft = False\n self.can_double = True\n self.can_split = False\n self.first_card = 0", "def clear():\n\t\tModel.counter = 0", "def reset_counter(self):\n if hasattr(self._id_generator, \"reset_counter\"):\n self._id_generator.reset_counter()", "def reset(self):\n self.c_count = 0\n self.a_count = -1\n self.epsilon = self.init_epsilon", "def clear(self):\n self.counts = [0] * len(self.values)\n if HAS_NUMPY:\n self.counts = numpy.array(self.counts)", "def reset(self):\n self._current_index = 0", "def reset(self):\n self.start_times = {}\n self.stats = defaultdict(OnlineMeter) # float defaults to 0", "def reset(self) -> None:\n self.counterpoint = self.counterpoint[0:1]\n self.__initialize_piano_roll()\n self.__set_defaults_to_runtime_variables()", "def reset(self):\n self.complete_misses = 0\n return", "def reset(self):\r\n\t\tself.index = 0", "def reset(self):\n self.stats = {}", "def stats_reset(self):\n self.stats.reset()", 
"def stats_reset(self):\n self.stats.reset()", "def reset(self) -> None:\n self.true_positives = 0\n self.actual_positives = 0", "def reset(self):\n self.damage_dealt = 0\n self.kills = 0\n self.got_killed = False\n self.fitness = 0", "def reset(self):\n self.table[:, :] = 0\n self.counts[:] = 0\n self.names = []\n self.hashesperid.resize(0)\n self.dirty = True", "def reset():", "def reset():", "def reset():", "def reset(self) -> List[int]:", "def reset(self):\n self.accumulation = None", "def reset(self):\n self.accumulation = None", "def reset(self):\n self._lastRoll = None\n self._initialSum = 0\n self._rollCount = 0", "def reset(self):\n self.test = 0\n self.pos = 0", "def reset(self):\n self.test = 0\n self.pos = 0", "def reset(self):\n self._open_activity_count = 0\n self._decisions = []\n self._tasks = TaskRegistry()", "def reset(self):\n self.restart()\n self.cycles = 0", "def reset(self):\r\n self.buffer = np.zeros(self.nBins)\r\n self.counter = 0", "def reset(self):\n self.algo_state = {}\n self.actual_repetitions = 0\n self.next_session = -1\n self.last_session = -1\n self.past_quality = []", "def reset(self):\n for i in range(0, len(self.current_state)):\n self.current_state[i] = 0\n\n for i in range(0, len(self.weights)):\n self.weights[i] = 0", "def testCounter():\n c = Counter()\n print(\"Expect 0: \", c)\n for i in range(5):\n c.increment()\n print(\"Expect 5: \", c)\n c.reset()\n print(\"Expect 0: \", c)", "def reset():\n global counter, total_attempts, successful_stops\n timer.stop()\n counter = 0\n total_attempts = 0\n successful_stops = 0", "def resetOperationCount():\n global _operationCount\n _countLock.acquire()\n try:\n _operationCount = 0\n finally:\n _countLock.release()", "def reset(self) -> None:\n self.statistics = defaultdict(float)", "def reset(self):\n self.cumtime = 0\n self.start_time = self.time()", "def reset(self):\n self.last_round = False\n self.last_player = None\n self.scores = [0] * self.num_players\n self.current_player = 0\n self.turn = 0\n self.roll = None", "def reset(self) -> List[int]:\n pass", "def reset(self):\n self.count = 0\n self.barrier.acquire()\n self.is_set = True", "def reset(self):\n self._value_estimates[:] = self.prior\n self.action_attempts[:] = 0\n self.last_action = None\n self.t = 0", "def reset_counters(cls):\n for field in cls.__fields__:\n if field.should_auto_increment:\n cls._reset_counter(*field.get_counter({}))", "def reset_stats() -> None:\n STATS[\"cleaned\"] = 0\n STATS[\"null\"] = 0\n STATS[\"unknown\"] = 0", "def reset(self):", "def reset(self):", "def reset(self):", "def reset(self):", "def __reset(self):\n\t\tself.__highest = -float('inf')\n\t\tself.__lowest = float('inf')\n\t\tself.__total = 0\n\t\tself.__steps = 0\n\t\tself.__cold_days = 0", "def clear(self):\n self.sum_hit_at_one = 0.0\n self.sum_perr = 0.0\n self.sum_loss = 0.0\n self.map_calculator.clear()\n self.global_ap_calculator.clear()\n self.num_examples = 0", "def reset(self):\n ...", "def reset(self):\n ...", "def reset_records(self):\n self.score_record = []\n self.score_window = deque(maxlen=100)", "def reset_state(self):\n for name in self.metrics:\n self.metrics[name].reset_state()", "def reset(self):\n self.__sets = []\n self._computed = False", "def resetWriteCount(self):\n self.writeCount = 0", "def reset(self):\n\t\tself.pos = self.start\n\n\t\tself.weighted_n_left = 0.0\n\t\tself.weighted_n_right = self.weighted_n_node_samples\n\n\t\tself.label_count_left \t= np.zeros(self.n_classes)\n\t\tself.label_count_right \t= 
np.copy(self.label_count_total)", "def reset(self):\n\n self._begin = 0\n self._end = 0\n self._size = 0", "def reset(self):\n self.memory.clear()\n self.relative_base = 0\n self.input_queue.clear()\n self.instr_idx = 0", "def reset(self) -> None:", "def reset(self) -> None:", "def reset(self) -> None:", "def reset(self):\n \n pass", "def reset(self):\r\n store = get_store()\r\n nbval = store.get('Nbtimecompound')[\"value\"]\r\n for i in range(1, nbval):\r\n self.del_line(1)", "def reset(self):\n self.sum = [0.] * len(self.topk)\n self.data_num = 0\n self.pfm = [0.] * len(self.topk)", "def reset():\r\n pass", "def reset(self):\n\t\tself._initial = None\n\t\tself._start = None\n\t\tself._time = 0\n\t\tself._total = 0\n\t\treturn self", "def reset(self):\n self.visited = False\n self.calculated = False\n self.past_value = self.value\n self.value = 0", "def reset(self):\n self.score = None\n self.true = None\n self.meta = None", "def reset(self):\n self.ref_value = 0.0\n self._average = 0.0\n self.num_samples = 0" ]
[ "0.8522219", "0.8301985", "0.8270119", "0.82612026", "0.81530696", "0.7962359", "0.7749801", "0.75962293", "0.7585864", "0.75297916", "0.74603754", "0.7459489", "0.7426602", "0.7421175", "0.7398909", "0.73889565", "0.7378627", "0.7351837", "0.7351327", "0.7349136", "0.7317789", "0.73143846", "0.73143846", "0.73143846", "0.7309193", "0.7274728", "0.7274077", "0.72478443", "0.7247323", "0.72226703", "0.718841", "0.7181822", "0.71816057", "0.71651375", "0.71567965", "0.71277523", "0.7099073", "0.7095955", "0.70699215", "0.7063472", "0.70532256", "0.7041536", "0.70040137", "0.70036745", "0.699846", "0.699846", "0.69877017", "0.6979502", "0.69794476", "0.69784313", "0.69784313", "0.69784313", "0.6971098", "0.6969234", "0.6969234", "0.69661105", "0.6958027", "0.6958027", "0.6937", "0.6924717", "0.6920337", "0.69155186", "0.69089454", "0.6887569", "0.688417", "0.68837166", "0.68694824", "0.6868667", "0.68643135", "0.6849264", "0.6846356", "0.68454283", "0.68231153", "0.68219864", "0.6769134", "0.6769134", "0.6769134", "0.6769134", "0.6757061", "0.67357075", "0.673024", "0.673024", "0.67199314", "0.67138", "0.67059475", "0.6691127", "0.6686006", "0.6685858", "0.66710925", "0.6668975", "0.6668975", "0.6668975", "0.66663545", "0.6665713", "0.6652416", "0.6646904", "0.66296774", "0.66126275", "0.6610137", "0.6606228" ]
0.71736586
33
resets counters to 0
def reset(self) -> None:\n    self.true_positives = 0\n    self.actual_positives = 0
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def reset_counter(self) -> None:", "def reset(self):\n self.counter = 0", "def reset (self):\n self.counter = 0", "def resetCounters(self):\n self.chain.zero_counters()\n counters = self.session.query(Counter).all()\n self.session.query(Counter).delete()", "def reset_count(self):\n self.count = 0", "def reset(self):\n for counterKey in self.counters.keys():\n self.counters[counterKey]=0\n self.title=None # 025 This is a hack of a hack. Trying to find if the counter was reset recently.", "def reset(self):\n for i in range(0, len(self.__counts)):\n self.__counts[i] = 0\n self.__overflow = 0\n self.__total_count = 0\n self.__total_values = 0\n self.__min = None\n self.__max = None", "def reset(self):\n self.correct_count = 0\n self.total_count = 0", "def reset(self):\n self.sum_metric = 0.\n self.num_inst = 0.\n self.metrics.reset_stats()", "def reset(self) -> None:\n self.current = 0\n self.num_cuts = 0", "def reset_counter(self):\n self.counter = 0\n self.highlight_input()", "def reset_counter(self) -> None:\n self._fail_counter = 0", "def reset(self):\n self.avg = 0\n self.sum = 0\n self.cnt = 0", "def clear(self):\n #for counterName in self.counters:\n # del self.counters[counterName]\n self.counters={}\n self.title=None", "def clear(self):\n self.counts = [{} for _ in range(len(self.counts))]", "def _reset_count(self):\n self._triple_count = 0\n self._error_count = 0\n self._ignored_count = 0", "def reset(self):\n self.reset_count += 1\n self._init_data()", "def reset(self):\n self.val = 0\n self.avg = 0\n self.sum = 0\n self.count = 0", "def reset(self):\n self.total_pulls = 0\n self.total_score = 0\n self.npulls = np.zeros(self.k)\n self.score = np.zeros(self.k)", "def reset(self):\n self.loss = 0\n self.cnt = 0", "def reset(self):\n self.liidx = 0\n self.clidx = 0", "def reset(self):\n self.test = 0\n self.hit = 0", "def reset(self):\n self.test = 0\n self.hit = 0", "def reset(self):\n self.test = 0\n self.hit = 0", "def reset_axis_counters(self):\n\n self.column_counter = 0\n self.row_counter = 0", "def reset(self):\n self.tot = 0\n self.cnt = [0.0 for _ in range( self.alpha.getLen() )]", "def reset_index(self):\n self.increments = 0", "def reset(self):\n self._total_value = 0.0\n self._count = 0", "def reset(self):\n self._accumulated_time.clear()\n self._hit_count.clear()", "def reset(self) -> None:\n self.statistics = defaultdict(int)", "def reset(self):\n self.num_inst = 0\n self.sum_metric = 0.0", "def resetCount(self):\n self.currentIndex = 0\n self.updateCurrentCommand()", "def reset(self):\n self._idx = 0", "def reset(self) -> None:\n self.true_positives = 0\n self.all_positives = 0", "def reset(self):\n self.count = 0\n self.soft = False\n self.can_double = True\n self.can_split = False\n self.first_card = 0", "def clear():\n\t\tModel.counter = 0", "def reset_counter(self):\n if hasattr(self._id_generator, \"reset_counter\"):\n self._id_generator.reset_counter()", "def reset(self):\n self.c_count = 0\n self.a_count = -1\n self.epsilon = self.init_epsilon", "def clear(self):\n self.counts = [0] * len(self.values)\n if HAS_NUMPY:\n self.counts = numpy.array(self.counts)", "def reset(self):\n self._current_index = 0", "def reset(self):\n self.start_times = {}\n self.stats = defaultdict(OnlineMeter) # float defaults to 0", "def reset(self) -> None:\n self.counterpoint = self.counterpoint[0:1]\n self.__initialize_piano_roll()\n self.__set_defaults_to_runtime_variables()", "def reset(self):\n self.complete_misses = 0\n return", "def reset(self):\r\n\t\tself.index = 0", "def 
reset(self):\n self.stats = {}", "def stats_reset(self):\n self.stats.reset()", "def stats_reset(self):\n self.stats.reset()", "def reset(self):\n self.damage_dealt = 0\n self.kills = 0\n self.got_killed = False\n self.fitness = 0", "def reset(self):\n self.table[:, :] = 0\n self.counts[:] = 0\n self.names = []\n self.hashesperid.resize(0)\n self.dirty = True", "def reset():", "def reset():", "def reset():", "def reset(self) -> List[int]:", "def reset(self):\n self.accumulation = None", "def reset(self):\n self.accumulation = None", "def reset(self):\n self._lastRoll = None\n self._initialSum = 0\n self._rollCount = 0", "def reset(self):\n self.test = 0\n self.pos = 0", "def reset(self):\n self.test = 0\n self.pos = 0", "def reset(self):\n self._open_activity_count = 0\n self._decisions = []\n self._tasks = TaskRegistry()", "def reset(self):\n self.restart()\n self.cycles = 0", "def reset(self):\r\n self.buffer = np.zeros(self.nBins)\r\n self.counter = 0", "def reset(self):\n self.algo_state = {}\n self.actual_repetitions = 0\n self.next_session = -1\n self.last_session = -1\n self.past_quality = []", "def reset(self):\n for i in range(0, len(self.current_state)):\n self.current_state[i] = 0\n\n for i in range(0, len(self.weights)):\n self.weights[i] = 0", "def testCounter():\n c = Counter()\n print(\"Expect 0: \", c)\n for i in range(5):\n c.increment()\n print(\"Expect 5: \", c)\n c.reset()\n print(\"Expect 0: \", c)", "def reset():\n global counter, total_attempts, successful_stops\n timer.stop()\n counter = 0\n total_attempts = 0\n successful_stops = 0", "def resetOperationCount():\n global _operationCount\n _countLock.acquire()\n try:\n _operationCount = 0\n finally:\n _countLock.release()", "def reset(self) -> None:\n self.statistics = defaultdict(float)", "def reset(self):\n self.cumtime = 0\n self.start_time = self.time()", "def reset(self):\n self.last_round = False\n self.last_player = None\n self.scores = [0] * self.num_players\n self.current_player = 0\n self.turn = 0\n self.roll = None", "def reset(self) -> List[int]:\n pass", "def reset(self):\n self.count = 0\n self.barrier.acquire()\n self.is_set = True", "def reset(self):\n self._value_estimates[:] = self.prior\n self.action_attempts[:] = 0\n self.last_action = None\n self.t = 0", "def reset_counters(cls):\n for field in cls.__fields__:\n if field.should_auto_increment:\n cls._reset_counter(*field.get_counter({}))", "def reset_stats() -> None:\n STATS[\"cleaned\"] = 0\n STATS[\"null\"] = 0\n STATS[\"unknown\"] = 0", "def reset(self):", "def reset(self):", "def reset(self):", "def reset(self):", "def __reset(self):\n\t\tself.__highest = -float('inf')\n\t\tself.__lowest = float('inf')\n\t\tself.__total = 0\n\t\tself.__steps = 0\n\t\tself.__cold_days = 0", "def clear(self):\n self.sum_hit_at_one = 0.0\n self.sum_perr = 0.0\n self.sum_loss = 0.0\n self.map_calculator.clear()\n self.global_ap_calculator.clear()\n self.num_examples = 0", "def reset(self):\n ...", "def reset(self):\n ...", "def reset_records(self):\n self.score_record = []\n self.score_window = deque(maxlen=100)", "def reset_state(self):\n for name in self.metrics:\n self.metrics[name].reset_state()", "def reset(self):\n self.__sets = []\n self._computed = False", "def resetWriteCount(self):\n self.writeCount = 0", "def reset(self):\n\t\tself.pos = self.start\n\n\t\tself.weighted_n_left = 0.0\n\t\tself.weighted_n_right = self.weighted_n_node_samples\n\n\t\tself.label_count_left \t= np.zeros(self.n_classes)\n\t\tself.label_count_right \t= 
np.copy(self.label_count_total)", "def reset(self):\n\n self._begin = 0\n self._end = 0\n self._size = 0", "def reset(self):\n self.memory.clear()\n self.relative_base = 0\n self.input_queue.clear()\n self.instr_idx = 0", "def reset(self) -> None:", "def reset(self) -> None:", "def reset(self) -> None:", "def reset(self):\r\n store = get_store()\r\n nbval = store.get('Nbtimecompound')[\"value\"]\r\n for i in range(1, nbval):\r\n self.del_line(1)", "def reset(self):\n \n pass", "def reset(self):\n self.sum = [0.] * len(self.topk)\n self.data_num = 0\n self.pfm = [0.] * len(self.topk)", "def reset():\r\n pass", "def reset(self):\n\t\tself._initial = None\n\t\tself._start = None\n\t\tself._time = 0\n\t\tself._total = 0\n\t\treturn self", "def reset(self):\n self.visited = False\n self.calculated = False\n self.past_value = self.value\n self.value = 0", "def reset(self):\n self.score = None\n self.true = None\n self.meta = None", "def reset(self):\n self.ref_value = 0.0\n self._average = 0.0\n self.num_samples = 0" ]
[ "0.8521774", "0.8300679", "0.8268725", "0.8260296", "0.8152586", "0.7961257", "0.77489495", "0.7595029", "0.75843966", "0.7528949", "0.7459658", "0.7458322", "0.7425785", "0.74207324", "0.7398623", "0.7388024", "0.7376379", "0.73507553", "0.735041", "0.73476917", "0.7314992", "0.7312663", "0.7312663", "0.7312663", "0.73077816", "0.7274049", "0.72732776", "0.72473824", "0.72459626", "0.7221236", "0.7187415", "0.71809196", "0.71794045", "0.7172304", "0.7163506", "0.7155809", "0.7126448", "0.7097511", "0.7096121", "0.70679486", "0.7061986", "0.70522535", "0.70409626", "0.7002257", "0.70017004", "0.6996456", "0.6996456", "0.6977844", "0.6977339", "0.6976065", "0.6976065", "0.6976065", "0.6970108", "0.6968219", "0.6968219", "0.6965619", "0.69560117", "0.69560117", "0.6934722", "0.69230855", "0.69192976", "0.6913952", "0.69067335", "0.688812", "0.6883295", "0.68827754", "0.6868274", "0.6867342", "0.68629414", "0.6847889", "0.6845222", "0.68441147", "0.68221366", "0.6820234", "0.6766873", "0.6766873", "0.6766873", "0.6766873", "0.675643", "0.6734442", "0.6727666", "0.6727666", "0.67192984", "0.6711491", "0.67033476", "0.6690444", "0.6684483", "0.66839075", "0.6668647", "0.66666657", "0.66666657", "0.66666657", "0.6664552", "0.66636735", "0.66510224", "0.6644284", "0.6628452", "0.6611789", "0.66082793", "0.6605247" ]
0.69865745
47
resets precision and recall
def reset(self) -> None:
    self.precision.reset()
    self.recall.reset()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _set_precision(self, precision) :\n self.__precision = self.parent().action().filter(precision)", "def change_precision(self, precision):\n if precision <= 0:\n print(\"Precision must be positive\")\n else:\n self.precision = precision\n self.input_equation(self.equation)\n print(f'Precision: {precision}')", "def _set_precision(self, precision) :\n self.__precision = self.parent().monoid().filter(precision)", "def recall(self):\n self.overall_recall = recall_score(\n self.y_true, self.y_pred, average = self.average_type).round(self.digits_count_fp)\n self.classes_recall = recall_score(\n self.y_true, self.y_pred, average = None).round(self.digits_count_fp)", "def reset(self):\n\n self.scaler = None\n self.isFitted = False\n self.__create_scaler()", "def reset(self):\n self.total_pulls = 0\n self.total_score = 0\n self.npulls = np.zeros(self.k)\n self.score = np.zeros(self.k)", "def reset(self):\n self.epsilon = self.start", "def reset(self):\n self._proportional = 0\n self._integral = 0\n self._derivative = 0\n\n self._last_time = self._current_time()\n self._last_output = None\n self._last_input = None", "def _truncate_in_place(self, precision) :\n precision = self.parent().action().filter(precision)\n nprec = min(self.precision(), precision)\n\n if nprec != self.precision() :\n for c in self.__coefficients :\n d = self.__coefficients[c]\n for k in d.keys() :\n if not k in nprec :\n del d[k]\n \n self._set_precision(nprec)", "def clear(self):\n self._baseline = 0\n self._sensitivity_im = 0\n self._is_update = False", "def _truncate_in_place(self, precision) :\n precision = self.parent().monoid().filter(precision)\n nprec = min(self.precision(), precision)\n\n if nprec != self.precision() :\n coefficients = self.__coefficients\n for k in coefficients.keys() :\n if not k in nprec :\n del coefficients[k]\n \n self._set_precision(nprec)", "def precision(self):\n self.overall_precision = precision_score(\n self.y_true, self.y_pred, average = self.average_type).round(self.digits_count_fp)\n self.classes_precision = precision_score(\n self.y_true, self.y_pred, average = None).round(self.digits_count_fp)", "def reset(self):\n self.epsilon = self.epsilon_start", "def reset(self):\n self.pred_classes.clear()\n self.gold_classes.clear()\n self.pred_probas.clear()\n self.gold_probas.clear()\n self.loss = 0\n self.nb_batches = 0\n self.prec_rec_f1 = None\n self.acc = None\n self.mcc = None", "def reset(self):\n self.ref_value = 0.0\n self._average = 0.0\n self.num_samples = 0", "def _reset(self):\n\n # Checking one attribute is enough, becase they are all set together\n # in partial_fit\n if hasattr(self, 'scale_'):\n del self.scale_", "def _reset(self):\n\n # Checking one attribute is enough, becase they are all set together\n # in partial_fit\n if hasattr(self, 'scale_'):\n del self.scale_", "def _reset(self):\n\n # Checking one attribute is enough, because they are all set together\n # in partial_fit\n if hasattr(self, 'scale_'):\n del self.scale_\n del self.n_samples_seen_\n del self.mean_\n del self.var_", "def setImpliedPrecision(self, precision_state):\n self.__typeid.setImpliedPrecision(precision_state)\n for ii in self.__parameters:\n ii.setImpliedPrecision(precision_state)", "def _reset(self):\n if self.mode not in ['auto', 'min', 'max']:\n logging.warning('Learning Rate Plateau Reducing mode %s is unknown, '\n 'fallback to auto mode.', self.mode)\n self.mode = 'auto'\n if (self.mode == 'min' or\n (self.mode == 'auto' and 'acc' not in self.monitor)):\n self.monitor_op = lambda a, b: 
np.less(a, b - self.min_delta)\n self.best = np.Inf\n else:\n self.monitor_op = lambda a, b: np.greater(a, b + self.min_delta)\n self.best = -np.Inf\n self.cooldown_counter = 0\n self.wait = 0", "def _reset(self):\n\n # Checking one attribute is enough, becase they are all set together\n # in partial_fit\n if hasattr(self, 'scale_'):\n del self.scale_\n del self.mean_\n del self.var_", "def _reset(self):\n if self.mode not in ['auto', 'min', 'max']:\n warnings.warn('Learning Rate Plateau Reducing mode %s is unknown, '\n 'fallback to auto mode.' % (self.mode),\n RuntimeWarning)\n self.mode = 'auto'\n if (self.mode == 'min' or\n (self.mode == 'auto' and 'acc' not in self.monitor)):\n self.monitor_op = lambda a, b: np.less(a, b - self.min_delta)\n self.best = np.Inf\n else:\n self.monitor_op = lambda a, b: np.greater(a, b + self.min_delta)\n self.best = -np.Inf\n self.cooldown_counter = 0\n self.wait = 0", "def reset_score(self):\n self.x_score = 0\n self.o_score = 0", "def reset(self):\n self.error_p = 0.0\n self.error_i = 0.0\n self.error_d = 0.0\n self.errors = [ 0.0 ] * self.samples\n if callable(self.debug_callback):\n self.debug_callback(\"reset\")", "def reset(self):\n self.calc1.reset()\n self.calc2.reset()\n self.calc3.reset()\n self.calc4.reset()\n self.calc5.reset()\n self.calc6.reset()\n self.calc7.reset()\n self.calc8.reset()\n self.calc9.reset()\n self.calc10.reset()", "def reset(self):\n self.sum = [0.] * len(self.topk)\n self.data_num = 0\n self.pfm = [0.] * len(self.topk)", "def precision_changed(self, new_precision):\n super(PyDMSpinbox, self).precision_changed(new_precision)\n self.setDecimals(self.precision)", "def reset(self) -> None:\n self.true_positives = 0\n self.actual_positives = 0", "def reset_score(self):\n self._score = p.params['initial_score']", "def reset(self):\n self.c_count = 0\n self.a_count = -1\n self.epsilon = self.init_epsilon", "def _reset(self):\n if self.mode not in ['auto', 'min', 'max']:\n warnings.warn(\n 'Learning rate reduction mode %s is unknown, '\n 'fallback to auto mode.' 
% self.mode\n )\n self.mode = 'auto'\n if self.mode == 'min' or (\n self.mode == 'auto' and 'acc' not in self.monitor\n ):\n self.monitor_op = lambda a, b: np.less(a, b - self.min_delta)\n self.best = np.Inf\n else:\n self.monitor_op = lambda a, b: np.greater(a, b + self.min_delta)\n self.best = -np.Inf\n self.cooldown_counter = 0\n self.wait = 0", "def reset(self):\n self.correct_count = 0\n self.total_count = 0", "def reset(self):\n super().reset()\n self.sample_count = 1\n self.miss_prob = 1.0\n self.miss_std = 0.0\n self.miss_prob_sd_min = float(\"inf\")\n self.miss_prob_min = float(\"inf\")\n self.miss_sd_min = float(\"inf\")", "def reset(self):\n self.current_exposure = None\n self.scores = {}", "def reset(self):\n self.baseline = None\n self.cut = None\n self.manual_push = 0", "def reset(self):\n self.test = 0\n self.pos = 0", "def reset(self):\n self.test = 0\n self.pos = 0", "def reset(self):\n self._total_value = 0.0\n self._count = 0", "def reset(self):\n self.num_inst = 0\n self.sum_metric = 0.0", "def reset():", "def reset():", "def reset():", "def reset(self):\n self.avg = 0\n self.sum = 0\n self.cnt = 0", "def reset(self):\n self.tot = 0\n self.cnt = [0.0 for _ in range( self.alpha.getLen() )]", "def _reset(self, new_base_lr=None, new_max_lr=None, new_step_size=None):\n if new_base_lr is not None:\n self.base_lr = new_base_lr\n if new_max_lr is not None:\n self.max_lr = new_max_lr\n if new_step_size is not None:\n self.step_size = new_step_size\n self.clr_iterations = 0.0", "def reset(self):\n self.val = 0\n self.avg = 0\n self.sum = 0\n self.count = 0", "def print_precision_recall(classifier, test_set):\n known_set = collections.defaultdict(set)\n computed_set = collections.defaultdict(set)\n \n for i, (features, label) in enumerate(test_set):\n known_set[label].add(i)\n predicted = classifier.classify(features)\n computed_set[predicted].add(i)\n\n print('pos precision:', precision(known_set['pos'], computed_set['pos']))\n print('pos recall:', recall(known_set['pos'], computed_set['pos']))\n print('neg precision:', precision(known_set['neg'], computed_set['neg']))\n print('neg recall:', recall(known_set['neg'], computed_set['neg']))", "def reset(self, runs):\n\n self.answer_wrong = 0\n self.answer_right = 0\n self.train_new(runs)", "def reset(self) -> None:\n self.current = 0\n self.num_cuts = 0", "def print_precision(newVal):\n\n global _print_precision\n _print_precision = newVal", "def reset(self):\n self.last_round = False\n self.last_player = None\n self.scores = [0] * self.num_players\n self.current_player = 0\n self.turn = 0\n self.roll = None", "def reset(self):", "def reset(self):", "def reset(self):", "def reset(self):", "def reset(self):\n self.accumulation = None", "def reset(self):\n self.accumulation = None", "def reset(self):\n self.sum_metric = 0.\n self.num_inst = 0.\n self.metrics.reset_stats()", "def reset(self):\n self.test = 0\n self.hit = 0", "def reset(self):\n self.test = 0\n self.hit = 0", "def reset(self):\n self.test = 0\n self.hit = 0", "def SetPrecision(self, *args):\n return _ShapeUpgrade.ShapeUpgrade_Tool_SetPrecision(self, *args)", "def reset(self):\n\n self.rotation = 0\n self.iteration = 0\n self.predictions = []\n self.prediction = 0\n self.current_position = 0\n self.rotation_list = [0]\n self.prediction = 0\n self.initial_adjust = False", "def reset(self):\n super().reset()\n self.m_n = 1\n self.m_num_errors = 0\n self.m_d = 0\n self.m_lastd = 0\n self.m_mean = 0.0\n self.m_std_temp = 0.0\n self.m_m2s_max = 0.0\n self.estimation = 0.0", 
"def precision_recall_calculator(reactions_removed, reactions_restored):\n \n reactions_removed = set(reactions_removed)\n reactions_restored = set(reactions_restored)\n \n true_positives = reactions_removed.intersection(reactions_restored)\n \n precision = len(true_positives)/len(reactions_restored)\n recall = len(true_positives)/len(reactions_removed)\n \n return precision, recall", "def reset(self) -> None:\n self.true_positives = 0\n self.all_positives = 0", "def reset(self, *args, **kwargs):", "def truncate(self, precision) :\n raise NotImplementedError", "def truncate(self, precision) :\n raise NotImplementedError", "def reset(self):\n self._value_estimates[:] = self.prior\n self.action_attempts[:] = 0\n self.last_action = None\n self.t = 0", "def reset(self):\n ...", "def reset(self):\n ...", "def calc_f1(precision: float, recall: float) -> float:\r\n return 2 * (precision * recall) / (precision + recall)", "def reset(self):\n self.integral = 0.0\n self.previous_error = 0.0", "def reset(self):\n self.cumtime = 0\n self.start_time = self.time()", "def reset(self):\n\n self.elapsed_time = 0", "def precision_recall(y_true, y_prob, ARGS):\n average_precision = average_precision_score(y_true, y_prob)\n if ARGS.graphs:\n precision, recall, _ = precision_recall_curve(y_true, y_prob)\n plt.style.use('ggplot')\n plt.clf()\n plt.plot(recall, precision,\n label='Precision-Recall Curve (Area = %0.3f)' % average_precision)\n plt.xlabel('Recall: P(predicted+|true+)')\n plt.ylabel('Precision: P(true+|predicted+)')\n plt.ylim([0.0, 1.05])\n plt.xlim([0.0, 1.0])\n plt.legend(loc=\"lower left\")\n print(f'Precision-Recall Curve saved to {ARGS.out_directory}/pr.png')\n plt.savefig(f'{ARGS.out_directory}/pr.png')\n else:\n print('Average Precision %0.3f' % average_precision)", "def clr(self):\n self.a = 0.0", "def reset(self):\n self.acc_loss = 0\n self.norm_term = 0", "def reset(self) -> None:", "def reset(self) -> None:", "def reset(self) -> None:", "def reset(self):\n self.score = None\n self.true = None\n self.meta = None", "def reset(*args):", "def reset(*args):", "def reset(*args):", "def _reset(self) -> None:", "def _reset(self) -> None:", "def reset(self):\n self.m = normalize(self.m0)\n self.t = 0.0", "def reset(self):\n \n pass", "def f1_score(precision, recall):\n if precision + recall == 0:\n return 0\n return 2 * precision * recall / (precision + recall)", "def reset_metrics(self):\n self.metrics['loss'] = 0.0\n self.metrics['num_tokens'] = 0\n self.metrics['correct_tokens'] = 0\n self.metrics['correct_pred'] = 0\n self.metrics['pred_count'] = 0", "def reset(self, *args, **kwargs):\n ...", "def reset():\r\n pass", "def _reset(self):", "def recall(self):\n tpos, fneg = len(self.__true_positives), len(self.__false_negatives)\n return float(tpos) / (tpos + fneg)", "def reset(self):\n self.pred = None\n self.target = None", "def reset(self):\n self.pred = None\n self.target = None", "def reset(self):\n self._timestep = np.array([0])", "def reset_average(self):\n self._total_time = 0\n self._average_time = 0\n self._calls = 0" ]
[ "0.6554539", "0.6496719", "0.64400053", "0.63610476", "0.6251985", "0.6230156", "0.61648434", "0.61604416", "0.6144796", "0.6111396", "0.6090455", "0.6075918", "0.60632086", "0.6062955", "0.60555625", "0.6028188", "0.6028188", "0.59946746", "0.5988276", "0.5975022", "0.5970604", "0.5965498", "0.5964751", "0.59626085", "0.59610724", "0.59586376", "0.59481734", "0.59334016", "0.59233767", "0.59087974", "0.5894855", "0.5887843", "0.58757055", "0.5874995", "0.58738184", "0.58693933", "0.58693933", "0.58625257", "0.5849377", "0.58485854", "0.58485854", "0.58485854", "0.58472455", "0.58317906", "0.5821073", "0.58117306", "0.5794853", "0.5792994", "0.5787498", "0.578104", "0.5775438", "0.57692426", "0.57692426", "0.57692426", "0.57692426", "0.57594764", "0.57594764", "0.57518995", "0.5743702", "0.5743702", "0.5743702", "0.57381487", "0.5734312", "0.57231015", "0.57020646", "0.5700765", "0.5696856", "0.5696735", "0.5696735", "0.56901866", "0.5686447", "0.5686447", "0.5676778", "0.5675364", "0.56685203", "0.56680995", "0.5667298", "0.5650745", "0.5648248", "0.56470245", "0.56470245", "0.56470245", "0.56452584", "0.56419766", "0.56419766", "0.56419766", "0.5638728", "0.5638728", "0.5632844", "0.5622428", "0.562201", "0.5612609", "0.56112045", "0.56099766", "0.5606285", "0.5600055", "0.5590333", "0.5590333", "0.5583789", "0.55822575" ]
0.81031436
0
resets counters to 0
def reset(self) -> None:
    self.f1.reset()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def reset_counter(self) -> None:", "def reset(self):\n self.counter = 0", "def reset (self):\n self.counter = 0", "def resetCounters(self):\n self.chain.zero_counters()\n counters = self.session.query(Counter).all()\n self.session.query(Counter).delete()", "def reset_count(self):\n self.count = 0", "def reset(self):\n for counterKey in self.counters.keys():\n self.counters[counterKey]=0\n self.title=None # 025 This is a hack of a hack. Trying to find if the counter was reset recently.", "def reset(self):\n for i in range(0, len(self.__counts)):\n self.__counts[i] = 0\n self.__overflow = 0\n self.__total_count = 0\n self.__total_values = 0\n self.__min = None\n self.__max = None", "def reset(self):\n self.correct_count = 0\n self.total_count = 0", "def reset(self):\n self.sum_metric = 0.\n self.num_inst = 0.\n self.metrics.reset_stats()", "def reset(self) -> None:\n self.current = 0\n self.num_cuts = 0", "def reset_counter(self):\n self.counter = 0\n self.highlight_input()", "def reset_counter(self) -> None:\n self._fail_counter = 0", "def reset(self):\n self.avg = 0\n self.sum = 0\n self.cnt = 0", "def clear(self):\n #for counterName in self.counters:\n # del self.counters[counterName]\n self.counters={}\n self.title=None", "def clear(self):\n self.counts = [{} for _ in range(len(self.counts))]", "def _reset_count(self):\n self._triple_count = 0\n self._error_count = 0\n self._ignored_count = 0", "def reset(self):\n self.reset_count += 1\n self._init_data()", "def reset(self):\n self.total_pulls = 0\n self.total_score = 0\n self.npulls = np.zeros(self.k)\n self.score = np.zeros(self.k)", "def reset(self):\n self.val = 0\n self.avg = 0\n self.sum = 0\n self.count = 0", "def reset(self):\n self.loss = 0\n self.cnt = 0", "def reset(self):\n self.liidx = 0\n self.clidx = 0", "def reset(self):\n self.test = 0\n self.hit = 0", "def reset(self):\n self.test = 0\n self.hit = 0", "def reset(self):\n self.test = 0\n self.hit = 0", "def reset_axis_counters(self):\n\n self.column_counter = 0\n self.row_counter = 0", "def reset(self):\n self.tot = 0\n self.cnt = [0.0 for _ in range( self.alpha.getLen() )]", "def reset_index(self):\n self.increments = 0", "def reset(self):\n self._total_value = 0.0\n self._count = 0", "def reset(self):\n self._accumulated_time.clear()\n self._hit_count.clear()", "def reset(self) -> None:\n self.statistics = defaultdict(int)", "def reset(self):\n self.num_inst = 0\n self.sum_metric = 0.0", "def reset(self):\n self._idx = 0", "def resetCount(self):\n self.currentIndex = 0\n self.updateCurrentCommand()", "def reset(self) -> None:\n self.true_positives = 0\n self.all_positives = 0", "def reset(self):\n self.count = 0\n self.soft = False\n self.can_double = True\n self.can_split = False\n self.first_card = 0", "def clear():\n\t\tModel.counter = 0", "def reset_counter(self):\n if hasattr(self._id_generator, \"reset_counter\"):\n self._id_generator.reset_counter()", "def reset(self):\n self.c_count = 0\n self.a_count = -1\n self.epsilon = self.init_epsilon", "def clear(self):\n self.counts = [0] * len(self.values)\n if HAS_NUMPY:\n self.counts = numpy.array(self.counts)", "def reset(self):\n self._current_index = 0", "def reset(self):\n self.start_times = {}\n self.stats = defaultdict(OnlineMeter) # float defaults to 0", "def reset(self) -> None:\n self.counterpoint = self.counterpoint[0:1]\n self.__initialize_piano_roll()\n self.__set_defaults_to_runtime_variables()", "def reset(self):\n self.complete_misses = 0\n return", "def reset(self):\r\n\t\tself.index = 0", "def 
reset(self):\n self.stats = {}", "def stats_reset(self):\n self.stats.reset()", "def stats_reset(self):\n self.stats.reset()", "def reset(self) -> None:\n self.true_positives = 0\n self.actual_positives = 0", "def reset(self):\n self.damage_dealt = 0\n self.kills = 0\n self.got_killed = False\n self.fitness = 0", "def reset(self):\n self.table[:, :] = 0\n self.counts[:] = 0\n self.names = []\n self.hashesperid.resize(0)\n self.dirty = True", "def reset():", "def reset():", "def reset():", "def reset(self) -> List[int]:", "def reset(self):\n self.accumulation = None", "def reset(self):\n self.accumulation = None", "def reset(self):\n self._lastRoll = None\n self._initialSum = 0\n self._rollCount = 0", "def reset(self):\n self.test = 0\n self.pos = 0", "def reset(self):\n self.test = 0\n self.pos = 0", "def reset(self):\n self._open_activity_count = 0\n self._decisions = []\n self._tasks = TaskRegistry()", "def reset(self):\n self.restart()\n self.cycles = 0", "def reset(self):\r\n self.buffer = np.zeros(self.nBins)\r\n self.counter = 0", "def reset(self):\n self.algo_state = {}\n self.actual_repetitions = 0\n self.next_session = -1\n self.last_session = -1\n self.past_quality = []", "def reset(self):\n for i in range(0, len(self.current_state)):\n self.current_state[i] = 0\n\n for i in range(0, len(self.weights)):\n self.weights[i] = 0", "def testCounter():\n c = Counter()\n print(\"Expect 0: \", c)\n for i in range(5):\n c.increment()\n print(\"Expect 5: \", c)\n c.reset()\n print(\"Expect 0: \", c)", "def reset():\n global counter, total_attempts, successful_stops\n timer.stop()\n counter = 0\n total_attempts = 0\n successful_stops = 0", "def resetOperationCount():\n global _operationCount\n _countLock.acquire()\n try:\n _operationCount = 0\n finally:\n _countLock.release()", "def reset(self) -> None:\n self.statistics = defaultdict(float)", "def reset(self):\n self.cumtime = 0\n self.start_time = self.time()", "def reset(self):\n self.last_round = False\n self.last_player = None\n self.scores = [0] * self.num_players\n self.current_player = 0\n self.turn = 0\n self.roll = None", "def reset(self) -> List[int]:\n pass", "def reset(self):\n self.count = 0\n self.barrier.acquire()\n self.is_set = True", "def reset(self):\n self._value_estimates[:] = self.prior\n self.action_attempts[:] = 0\n self.last_action = None\n self.t = 0", "def reset_counters(cls):\n for field in cls.__fields__:\n if field.should_auto_increment:\n cls._reset_counter(*field.get_counter({}))", "def reset_stats() -> None:\n STATS[\"cleaned\"] = 0\n STATS[\"null\"] = 0\n STATS[\"unknown\"] = 0", "def reset(self):", "def reset(self):", "def reset(self):", "def reset(self):", "def __reset(self):\n\t\tself.__highest = -float('inf')\n\t\tself.__lowest = float('inf')\n\t\tself.__total = 0\n\t\tself.__steps = 0\n\t\tself.__cold_days = 0", "def clear(self):\n self.sum_hit_at_one = 0.0\n self.sum_perr = 0.0\n self.sum_loss = 0.0\n self.map_calculator.clear()\n self.global_ap_calculator.clear()\n self.num_examples = 0", "def reset(self):\n ...", "def reset(self):\n ...", "def reset_records(self):\n self.score_record = []\n self.score_window = deque(maxlen=100)", "def reset_state(self):\n for name in self.metrics:\n self.metrics[name].reset_state()", "def reset(self):\n self.__sets = []\n self._computed = False", "def resetWriteCount(self):\n self.writeCount = 0", "def reset(self):\n\t\tself.pos = self.start\n\n\t\tself.weighted_n_left = 0.0\n\t\tself.weighted_n_right = self.weighted_n_node_samples\n\n\t\tself.label_count_left \t= 
np.zeros(self.n_classes)\n\t\tself.label_count_right \t= np.copy(self.label_count_total)", "def reset(self):\n\n self._begin = 0\n self._end = 0\n self._size = 0", "def reset(self):\n self.memory.clear()\n self.relative_base = 0\n self.input_queue.clear()\n self.instr_idx = 0", "def reset(self) -> None:", "def reset(self) -> None:", "def reset(self) -> None:", "def reset(self):\n \n pass", "def reset(self):\r\n store = get_store()\r\n nbval = store.get('Nbtimecompound')[\"value\"]\r\n for i in range(1, nbval):\r\n self.del_line(1)", "def reset(self):\n self.sum = [0.] * len(self.topk)\n self.data_num = 0\n self.pfm = [0.] * len(self.topk)", "def reset():\r\n pass", "def reset(self):\n\t\tself._initial = None\n\t\tself._start = None\n\t\tself._time = 0\n\t\tself._total = 0\n\t\treturn self", "def reset(self):\n self.visited = False\n self.calculated = False\n self.past_value = self.value\n self.value = 0", "def reset(self):\n self.score = None\n self.true = None\n self.meta = None", "def reset(self):\n self.ref_value = 0.0\n self._average = 0.0\n self.num_samples = 0" ]
[ "0.8522219", "0.8301985", "0.8270119", "0.82612026", "0.81530696", "0.7962359", "0.7749801", "0.75962293", "0.7585864", "0.75297916", "0.74603754", "0.7459489", "0.7426602", "0.7421175", "0.7398909", "0.73889565", "0.7378627", "0.7351837", "0.7351327", "0.7349136", "0.7317789", "0.73143846", "0.73143846", "0.73143846", "0.7309193", "0.7274728", "0.7274077", "0.72478443", "0.7247323", "0.72226703", "0.718841", "0.7181822", "0.71816057", "0.71736586", "0.71651375", "0.71567965", "0.71277523", "0.7099073", "0.7095955", "0.70699215", "0.7063472", "0.70532256", "0.7041536", "0.70040137", "0.70036745", "0.699846", "0.699846", "0.69877017", "0.6979502", "0.69794476", "0.69784313", "0.69784313", "0.69784313", "0.6971098", "0.6969234", "0.6969234", "0.69661105", "0.6958027", "0.6958027", "0.6937", "0.6924717", "0.6920337", "0.69155186", "0.69089454", "0.6887569", "0.688417", "0.68837166", "0.68694824", "0.6868667", "0.68643135", "0.6849264", "0.6846356", "0.68454283", "0.68231153", "0.68219864", "0.6769134", "0.6769134", "0.6769134", "0.6769134", "0.6757061", "0.67357075", "0.673024", "0.673024", "0.67199314", "0.67138", "0.67059475", "0.6691127", "0.6686006", "0.6685858", "0.66710925", "0.6668975", "0.6668975", "0.6668975", "0.66663545", "0.6665713", "0.6652416", "0.6646904", "0.66296774", "0.66126275", "0.6610137", "0.6606228" ]
0.0
-1
Checks whether the array lies within the bounds
def __call__(self, value: np.ndarray) -> bool:
    for k, bound in enumerate(self.bounds):
        if bound is not None:
            if np.any((value > bound) if k else (value < bound)):
                return False
    return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _in_bounds(self, x, y):\r\n return 0 <= x < 8 and 0 <= y < 8", "def in_bounds(self, x, y):\n return x >= 0 and x < 8 and y >= 0 and y < 8", "def check_bounds(self, index):\n if index < self.lower_bound or index > self.upper_bound:\n return False\n return True", "def check_bounds(self, row: int, col: int) -> bool:\n return 0 <= row < self.row and 0 <= col < self.col", "def has_bounds(self):\r\n bounds = self.bounds\r\n if bounds in (None, [None, None]):\r\n return False\r\n for i in xrange(bounds[0]):\r\n if bounds[0][i] is not None and bounds[0][i] > -np.inf:\r\n return True\r\n for i in xrange(bounds[1]):\r\n if bounds[1][i] is not None and bounds[1][i] < np.inf:\r\n return True\r\n return False", "def checkWithinBound(rowWithinBound,colWithinBound):\n if(rowWithinBound == 0 and colWithinBound == 0):\n return True\n else:\n return False", "def boundary_check(limits : tuple, coords : tuple) -> bool:\n xl,xh,yl,yh = limits\n x,y = coords\n bound_x = xl <= x and x < xh\n bound_y = yl <= y and y < yh\n return bound_x and bound_y", "def in_range(x, y):\n if (x < 0 or x > width or y < 0 or y > length):\n return False\n else:\n return True", "def in_bounds(self, position):\n row, col = position\n return ((row >= 0 and row < self.height) and\n (col >= 0 and col < self.width))", "def in_bounds(self, location: tuple) -> bool:\n return 0 <= min(location) and max(location) <= 7", "def check_bounds (position, size):\n \n for item in position:\n # checks whether item is out of bounds\n if item < 0 or item >= size:\n return False\n return True", "def _inside_bounds(A, B):\n for axis in 'xyz':\n minA, maxA = axis_bounds(A, axis)\n minB, maxB = axis_bounds(B, axis)\n if (minA <= minB) or (maxA >= maxB):\n return False\n\n return True", "def check_bounds(x, param_name):\n for i in range(len(x)):\n if ((xmin[param_name][i] is not None and x[i] < xmin[param_name][i]) or\n (xmax[param_name][i] is not None and x[i] > xmax[param_name][i])):\n return False\n return True", "def inBounds(self, px, py):\n return px >= 0 and py >= 0 and px < self.w and py < self.h", "def inBounds(self,pos):\n return ((pos.x<WIDTH) & (pos.x>=0) & (pos.y<HEIGHT) & (pos.y>=0))", "def out_of_bounds(self):\n return self.rect.right <= 0", "def check(self):\n self.lower_bound(5e-4)\n self.upper_bound(5e2)", "def in_bounds(state, map_shape):\n return 0 <= state[0] < map_shape[0] and 0 <= state[1] < map_shape[1]", "def in_bounds(self, point):\n # Sanity checks\n # Check that point has same number of dimensions as graph\n if not len(point) == len(self.dimensions):\n raise Exception(\"Point has \" + str(len(point)) + \" dimensions, Coordination Space has \" + \\\n str(len(self.dimensions)) + \" dimensions.\")\n\n for i, coordinate in enumerate(point):\n if coordinate > self.dimensions[i] or coordinate < 0:\n return False\n\n return True", "def _check_bound(self, q):\n mat = ur_utils.forward(q, self._ik_params)\n xyz = mat[:3, 3]\n inside_bound = np.all(self._end_effector_low <= xyz) and np.all(xyz <= self._end_effector_high)\n inside_buffer_bound = (np.all(self._end_effector_low + self._box_bound_buffer <= xyz) and \\\n np.all(xyz <= self._end_effector_high - self._box_bound_buffer))\n return inside_bound, inside_buffer_bound, mat, xyz", "def out_of_bounds(self):\n return not 0 <= self.nodes[0].x < WIDTH * SCALE or not 0 <= self.nodes[0].y < HEIGHT * SCALE", "def IsBound(self) -> bool:", "def __bounds_check(self, *wavelengths: float):\n lowerb = self.spectrum[:, 0][0]\n upperb = self.spectrum[:, 0][-1]\n # See if the wavelength(s) is 
out of bounds, throw error\n for w in wavelengths:\n if not lowerb <= w <= upperb:\n print(\"Wavelength %0.2f nm out of spectra bounds\" % w)\n if w < lowerb:\n raise IndexError(\"Please use the lower bound of %0.2f nm.\" % lowerb)\n elif w > upperb:\n raise IndexError(\"Please use the upper bound of %0.2f nm.\" % upperb)\n else:\n pass\n return", "def pos_within_bounds(position):\n if type(position[0]) == int:\n row,col = position\n else:\n col,row = position\n \n if not 1<=row<=8:\n return False\n if not 65<=ord(col)<=72:\n return False\n return True", "def check_boundary(self, width, height):\r\n if 0 <= self.head[0] + self.direction[0]*10 <= width - 10 and 0 <= self.head[1] + self.direction[1]*10 <= height - 10:\r\n return True\r\n else:\r\n return False", "def check_coord_in_range(self, x, y):\n return 0 <= x < self.cols and 0 <= y < self.lines", "def check_range_value(array, min_=None, max_=None):\n # check lowest and highest bounds\n if min_ is not None and array.min() < min_:\n raise ValueError(\"The array should have a lower bound of {0}, but its \"\n \"minimum value is {1}.\".format(min_, array.min()))\n if max_ is not None and array.max() > max_:\n raise ValueError(\"The array should have an upper bound of {0}, but \"\n \"its maximum value is {1}.\".format(max_, array.max()))\n\n return True", "def detect_in_bounds(self):\n creature_x, creature_y = self.creature.current_location\n if creature_x < 0 or creature_x >= self.world_width\\\n or creature_y < 0 or creature_y >= self.world_height:\n print('The creature is out of bounds!')\n return False\n return True", "def __is_valid(self, subscript):\n return ((0,0) <= subscript and subscript < self.size)", "def is_inside_bounds(point, bounds):\n if isinstance(point, (int, float)):\n point = [point]\n if isinstance(point, (np.ndarray, collections.abc.Sequence)) and not isinstance(\n point, collections.deque\n ):\n if len(bounds) < 2 * len(point) or len(bounds) % 2 != 0:\n raise ValueError('Bounds mismatch point dimensionality')\n point = collections.deque(point)\n bounds = collections.deque(bounds)\n return is_inside_bounds(point, bounds)\n if not isinstance(point, collections.deque):\n raise TypeError(f'Unknown input data type ({type(point)}).')\n if len(point) < 1:\n return True\n p = point.popleft()\n lower, upper = bounds.popleft(), bounds.popleft()\n if lower <= p <= upper:\n return is_inside_bounds(point, bounds)\n return False", "def out_of_bounds(self):\n return self.rect.right <= 0 or self.rect.left >= self.screen_rect.width", "def checkInBound(self,value,checkEdge):\n assert(checkEdge==0 or checkEdge==1)\n if checkEdge==0: # width\n assert(value>=0 and value<self.w)\n else:\n assert(value>=0 and value<self.h)", "def _check_bounds(self, x_new):\n\n # If self.bounds_error is True, we raise an error if any x_new values\n # fall outside the range of x. Otherwise, we return an array indicating\n # which values are outside the boundary region.\n below_bounds = x_new < self.x[0]\n above_bounds = x_new > self.x[-1]\n\n # !! Could provide more information about which values are out of bounds\n if self.bounds_error and below_bounds.any():\n raise ValueError(\"A value in x_new is below the interpolation \"\n \"range.\")\n if self.bounds_error and above_bounds.any():\n raise ValueError(\"A value in x_new is above the interpolation \"\n \"range.\")\n\n # !! Should we emit a warning if some values are out of bounds?\n # !! 
matlab does not.\n out_of_bounds = logical_or(below_bounds, above_bounds)\n return out_of_bounds", "def test_contains_bounds(self):\n dim = Dimension(\"yolo\", \"uniform\", -3, 4)\n with pytest.raises(NotImplementedError):\n assert -3 in dim", "def in_box_bounds(self, test_vec):\n above_min = np.greater(test_vec, self.lower_vertex).all()\n below_max = np.greater(self.upper_vertex, test_vec).all()\n return above_min and below_max", "def g_in_bounds(x, lo, hi):\n\n return (x >= lo) and (x <= hi)", "def _point_within_bounds(bounds, p):\n A, B = bounds\n # we have to add epsilon since test against horizontal or vertical\n # lines may fail if the point is off by numerical precision\n eps = 1e-10\n (Ax,Ay), (Bx,By), (px,py)=A,B,p\n return (\n (min((Ax,Bx))-eps<=px<=max((Ax,Bx))+eps) and\n (min((Ay,By))-eps<=py<=max((Ay,By))+eps)\n )", "def bounds_check(session):\n\n max_ = session.field.opts.max\n min_ = session.field.opts.min\n\n if max_ is not None and len(session.data) > max_:\n raise session.field.invalid(error_type='out_of_bounds')\n if min_ is not None and len(session.data) < min_:\n raise session.field.invalid(error_type='out_of_bounds')\n\n return session.data", "def out_of_bounds(self):\n return self._parms.get(\"out_of_bounds\")", "def inside_limits(self, point):\n if not self.regions:\n # Use rectangle check\n lat, lon = point.latitude, point.longitude\n if (lon > self.limits[0] and lat > self.limits[1] and\n lon < self.limits[2] and lat < self.limits[3]):\n return True\n else:\n return False\n else:\n # Check inside all possible regions\n p = Point((point.longitude, point.latitude))\n print(p, point)\n # import IPython; IPython.embed()\n for name, poly in self.regions.items():\n # if poly.contains(p):\n if p.intersects(poly):\n return name\n return False", "def _inside_op_range(self, idx):\n\n if idx < self._parameters.op_range[0]:\n return False\n return (self._parameters.op_range[1] < 0 or\n idx <= self._parameters.op_range[1])", "def is_in_bounds(pos):\n return PLAYFIELD_PADDING[0] < pos[0] < PLAYFIELD_PADDING[0] +\\\n BLOCK_NUM_WIDTH * Block.WIDTH and PLAYFIELD_PADDING[1] < pos[1] <\\\n PLAYFIELD_PADDING[1] + BLOCK_NUM_HEIGHT * Block.HEIGHT", "def bounds(self, pos):", "def is_out_of_bounds(img_height: int, img_width: int, x: float, y: float, patch_size: int) -> bool:\n patch_half_size_floored = patch_size // 2\n x_low = x - patch_half_size_floored\n x_high = x + patch_half_size_floored\n y_low = y - patch_half_size_floored\n y_high = y + patch_half_size_floored\n\n return x_low < 0 or x_high > img_width or y_low < 0 or y_high > img_height", "def contains(self, x):\n return (isinstance(x, int) and x >= 0 and x < self._dim)", "def valid_coordinates(self, x, y):\n return ((x >= 0) and (x < self.width) and\n (y >= 0) and (y < self.height))", "def isoutofbounds(indices, dims):\n indices = np.asarray(indices)\n dims = np.asarray(dims)\n z = np.zeros_like(dims)\n return np.any(np.logical_or(indices < z, indices >= dims), -1)", "def check(self, parameters):\n if np.any(parameters < self._lower):\n return False\n if np.any(parameters > self._upper):\n return False\n return True", "def covers_overlaps(self, bounds):\n bounds = tuple(float(b) for b in bounds)\n return self.numba_rtree.covers_overlaps(bounds)", "def in_geo_limits(args: argparse.Namespace, track_data: dict) -> bool:\n return (track_data['boundaries']['north'] <= args.north_lim and\n track_data['boundaries']['south'] >= args.south_lim and\n track_data['boundaries']['east'] <= args.east_lim and\n 
track_data['boundaries']['west'] >= args.west_lim)", "def __contains__(self, x: ArrayLike) -> bool:\n\n return bool(\n np.all(\n np.where(\n np.logical_and(\n x >= np.min(self._domain), # pyright: ignore\n x <= np.max(self._domain), # pyright: ignore\n ),\n True,\n False,\n )\n )\n )", "def isRangeValid(self) -> bool:\n ...", "def validate_position(position: Tuple[int, int], bound: int) -> bool:\n if position[0] < 0 or position[0] >= bound:\n return False\n if position[1] < 0 or position[1] >= bound:\n return False\n return True", "def is_bound(self, point):\n return self.__begin == point or self.__end == point", "def box_in_range(self,x,y,z,d, x_range, y_range, z_range, d_range):\n return np.logical_and.reduce((\n x > x_range[0], x < x_range[1],\n y > y_range[0], y < y_range[1],\n z > z_range[0], z < z_range[1],\n d > d_range[0], d < d_range[1]))", "def checkranges(self, var, name):\r\n\r\n # reshape var\r\n assert len(var) == 2, \"%s must have two elements,\" % name\r\n var = np.array([float(v) for v in var])\r\n\r\n # check values\r\n if name in [\"arange\", \"Rprange\", \"Mprange\"]:\r\n assert np.all(var > 0), \"%s values must be strictly positive\" % name\r\n if name in [\"erange\", \"prange\"]:\r\n assert np.all(var >= 0) and np.all(var <= 1), (\r\n \"%s values must be between 0 and 1\" % name\r\n )\r\n\r\n # the second element must be greater or equal to the first\r\n if var[1] < var[0]:\r\n var = var[::-1]\r\n\r\n return var", "def test_contains_extra_bounds(self):\n dim = Real(\"yolo\", \"norm\", 0, 3, low=-3, high=+3)\n assert dists.uniform.rvs(-3, 3) in dim\n assert -4 not in dim\n assert +4 not in dim\n assert (1, 2) not in dim", "def test_out_of_bounds(self) -> None:\n\n self.assertIsInstance(self.movement.out_of_bounds(self.pop.get_person(),\n np.array([[0,1]] * 10),np.array([[0,1]] * 10)), np.ndarray)\n self.pop.persons[:,idx.speed] = 1\n self.pop.persons[:,idx.x_axis] = 1.1\n self.pop.persons[:,idx.y_axis] = 1.1\n self.pop.persons[:,idx.x_dir] = 0.5\n self.pop.persons[:,idx.y_dir] = 0.5\n\n self.assertLess(list(self.movement.out_of_bounds(self.pop.get_person(),\n np.array([[0,1]] * 10),np.array([[0,1]] * 10))[:,idx.x_dir]), [0]*10)\n self.assertLess(list(self.movement.out_of_bounds(self.pop.get_person(),\n np.array([[0,1]] * 10),np.array([[0,1]] * 10))[:,idx.x_dir]), [0]*10)\n\n self.pop.persons[:,idx.x_axis] = -0.1\n self.pop.persons[:,idx.y_axis] = -0.1\n self.pop.persons[:,idx.x_dir] = -0.5\n self.pop.persons[:,idx.y_dir] = -0.5\n self.assertGreater(list(self.movement.out_of_bounds(self.pop.get_person(),\n np.array([[0,1]] * 10),np.array([[0,1]] * 10))[:,idx.x_dir]), [0]*10)\n self.assertGreater(list(self.movement.out_of_bounds(self.pop.get_person(),\n np.array([[0,1]] * 10),np.array([[0,1]] * 10))[:,idx.x_dir]), [0]*10)", "def _is_valid_land(x, y, grid):\n return (x >= 0) and (x < len(grid)) and (y >= 0) and (y < len(grid[0])) and grid[x][y]", "def point_into_bound(bounds, point, array_name):\n\theight, width = bounds #get ndarray shape\n\tx, y = point #get the components of stamp_point\n\t#validate in x\n\tassert (x >= 0 and x < width), \"out of {} bounds in x axis in stamp_point\".format(array_name)\n\t#validate in y\n\tassert (y >= 0 and y < height), \"out of {} bounds in y axis in stamp_point\".format(array_name)", "def is_valid(array, index):\n row, column = index\n return 0 <= row < len(array) and 0 <= column < len(array[row])", "def checkBounds(x,y,z,center,radius):\n r2 = (x-center[0])**2 + (y-center[1])**2# + (z-center[0])**2\n if r2 < radius**2:\n return 
True\n else:\n return False", "def inBounds(self, coordMin, coordMax=None):\n if coordMax == None:\n coordMax = coordMin\n coordMin = Coordinate(AxisDistance(), AxisDistance())\n\n xMin = coordMin.x\n xMax = coordMax.x\n yMin = coordMin.y\n yMax = coordMax.y\n inX = self.x.inRange(xMin, xMax)\n inY = self.x.inRange(yMin, yMax)\n return inX & inY", "def in_bound(dim , s):\n if s <= -1:\n return 0\n elif s >= dim:\n return dim - 1\n else:\n return s", "def check_allowed_positions(scan, psi, probe_shape):\n int_scan = scan // 1\n less_than_one = int_scan < 1\n greater_than_psi = np.stack(\n (int_scan[..., -2] >= psi.shape[-2] - probe_shape[-2],\n int_scan[..., -1] >= psi.shape[-1] - probe_shape[-1]),\n -1,\n )\n if np.any(less_than_one) or np.any(greater_than_psi):\n x = np.logical_or(less_than_one, greater_than_psi)\n raise ValueError(\"These scan positions exist outside field of view:\\n\"\n f\"{scan[np.logical_or(x[..., 0], x[..., 1])]}\")", "def hit_wall(s):\n if s == [1, 1]: # We would enter the None-field\n return True\n elif s[0] < 0 or s[0] > 2 or s[1] < 0 or s[1] > 3: # We would be out of bounds\n return True\n else:\n return False", "def valid(self, pos):\n\t\tpos = Point(pos)\n\t\treturn 0 <= pos.x < self.dims.width and 0 <= pos.y < self.dims.height", "def _in_box(self, point, extent):\n return ((point[0] >= extent[0]) and\n (point[0] <= extent[1]) and\n (point[1] >= extent[2]) and\n (point[1] <= extent[3]))", "def check_extent(self):\n if self.lower_left.x > self.upper_right.x:\n dlx = self.lower_left.x\n self.lower_left.x = self.upper_right.x\n self.upper_right.y = dlx\n\n if self.lower_left.y > self.upper_right.y:\n dly = self.lower_left.y\n self.lower_left.y = self.upper_right.y\n self.upper_right.y = dly", "def check_interval_bounds(begin, end):\n if begin.get_midpoint() >= end.get_midpoint():\n return False\n\n if begin.get_radius() is not None and end.get_radius() is not None:\n if begin.get_midpoint() - begin.get_radius() > \\\n end.get_midpoint() - end.get_radius():\n return False\n\n return True", "def _check_bounds(self,val):\n if not (isinstance(val,list)):\n raise ValueError(\"List '%s' must be a list.\"%(self._attrib_name))\n\n if self.bounds is not None:\n min_length,max_length = self.bounds\n l=len(val)\n if min_length is not None and max_length is not None:\n if not (min_length <= l <= max_length):\n raise ValueError(\"%s: list length must be between %s and %s (inclusive)\"%(self._attrib_name,min_length,max_length))\n elif min_length is not None:\n if not min_length <= l: \n raise ValueError(\"%s: list length must be at least %s.\"%(self._attrib_name,min_length))\n elif max_length is not None:\n if not l <= max_length:\n raise ValueError(\"%s: list length must be at most %s.\"%(self._attrib_name,max_length))\n\n self._check_type(val)", "def check_inside(self, person):\n p_top_x = person[0] + self.padding\n p_left_y = person[1] + self.padding\n p_bottom_x = person[2] - self.padding\n p_right_y = person[3] - self.padding\n\n return p_top_x >= self.top_x and p_left_y >= self.left_y and p_bottom_x <= self.bottom_x \\\n and p_right_y <= self.right_y", "def in_rectangle(x, y):\n return ((self.min_x <= x <= self.max_x) and\n (self.min_y <= y <= self.max_y))", "def isInRange(val, minv, maxv):\n\treturn val >= minv and val <= maxv", "def test_output_range(self):\n byt = bytscl(self.array1)\n outside = (byt < 0) | (byt > 255)\n total = numpy.sum(outside)\n self.assertEqual(total, 0)", "def inrange(cc, point):\n return point.row in range(cc.top, cc.bottom+1) and point.col in 
range(cc.left, cc.right+1)", "def valid_ray(self, row, col):\n # if row nor col is at an edge space, returns False\n if row != 0 and row != 9 and col != 0 and col != 9:\n return False\n # ensures no corner spaces have been selected\n if row == 0 or row == 9:\n if col > 8 or col < 1:\n return False\n if col == 0 or col == 9:\n if row > 8 or row < 1:\n return False\n return True", "def _assert_bounds_are_valid(\n self, new_bounds: devices.PrimaryBounds\n ) -> None:\n correct_length = len(new_bounds) == self.nprimaries\n tuples_of_float = all(\n [\n isinstance(item, tuple) and isinstance(b, (float, np.floating))\n for item in new_bounds\n for b in item\n ]\n )\n in_gamut = all([b[0] >= 0.0 and b[1] <= 1.0 for b in new_bounds])\n\n if not all([correct_length, tuples_of_float, in_gamut]):\n print(self.__class__.bounds.__doc__)\n raise SilSubProblemError(\"Invalid input for bounds (see above).\")", "def point_in_rectangle(point: Vector, rect_min: Vector, rect_max: Vector) -> bool:\n return rect_min[0] <= point[0] <= rect_max[0] and rect_min[1] <= point[1] <= rect_max[1]", "def in_range(data, minval=-np.inf, maxval=np.inf):\n return (minval <= data) & (data <= maxval)", "def _withinRangeChecker(entity, params):\n\n for constraint in constraints:\n type = constraint.get('type')\n field = constraint.get('field')\n\n if not type or not field:\n raise ProtocolError()\n\n min_value = constraint.get('min_value', 0)\n max_value = constraint.get('max_value', 1)\n\n if type == 'size':\n value = entity.__getattribute__(field)\n if len(value) < min_value or len(value) > max_value:\n return False\n else:\n raise ProtocolError()\n \n return True", "def in_pixel_range(self, pixmin: int, pixmax: int) -> bool:\n \n if any(i < pixmin or i > pixmax or np.isnan(i) for i in self.datapos):\n return False\n\n return True", "def assert_bounds(self, pos):\n row, col = pos\n\n if not (row in range(self.BOARD_SIZE) and\n col in range(self.BOARD_SIZE)):\n raise IndexError(\"Cannot place a worker out of board bounds\")", "def is_bound(pos1, el1, pos2, el2):\n threshold = 0.1\n if el1 == 'H' or el2 == 'H':\n threshold = 0.2\n if np.linalg.norm(np.array(pos1) - np.array(pos2)) < covalence_radius[el1] + covalence_radius[el2] + threshold:\n return True\n return False", "def check_limits(self):\n\n #Find the relative position of each leg vs. 
its \"zero\" position\n relpos = self.fixed_plate - self.fixed_plate_zero\n\n for leg in range(3):\n #Check that the leg is within allowable \"safe zone\"\n #Use the position of the leg (relative to 0) to find the index in the \"safe zone\" matrix\n i_x = nearest_index(self.leg_safe_xaxis, relpos[COORD_X, leg])\n i_z = nearest_index(self.leg_safe_zaxis, relpos[COORD_Z, leg])\n #Look up in the safe zone.\n self.leg_fault[leg] = (not self.leg_safe_zone[leg, i_x, i_z])\n\n if (not all(np.isreal(self.fixed_plate[:, leg]))) or any(np.isnan(self.fixed_plate[:, leg])):\n #A complex or NaN value = the angle found for the leg was invalid, meaning that the\n #leg would have to be longer to reach the desired position.\n self.leg_fault[leg] = True", "def test_bounds_of_threshold_points(self):\n result = Plugin()._add_bounds_to_thresholds_and_probabilities(\n self.threshold_points, self.probabilities_for_cdf, self.bounds_pairing\n )\n self.assertArrayAlmostEqual(result[0][0], self.bounds_pairing[0])\n self.assertArrayAlmostEqual(result[0][-1], self.bounds_pairing[1])", "def coordinates_within_board(n: int, x: int, y: int) -> bool:\n\n return x < n and y < n and x >= 0 and y >= 0", "def find_valid_region(self, spat_rel):\n # The top-down view is transposed.\n if spat_rel == \"above\":\n is_valid = np.vectorize(lambda x: (x > 157.5) or (x <= -157.5))\n elif spat_rel == \"above-right\":\n is_valid = np.vectorize(lambda x: (x > -157.5) and (x <= -112.5))\n elif spat_rel == \"right\":\n is_valid = np.vectorize(lambda x: (x > -112.5 and x <= -67.5))\n elif spat_rel == \"below-right\":\n is_valid = np.vectorize(lambda x: (x > -67.5) and (x <= -22.5))\n elif spat_rel == \"below\":\n is_valid = np.vectorize(lambda x: (x > -22.5) and (x <= 22.5))\n elif spat_rel == \"below-left\":\n is_valid = np.vectorize(lambda x: (x > 22.5) and (x <= 67.5))\n elif spat_rel == \"left\":\n is_valid = np.vectorize(lambda x: (x > 67.5) and (x <= 112.5))\n elif spat_rel == \"above-left\":\n is_valid = np.vectorize(lambda x: (x > 112.5) and (x <= 157.5))\n return is_valid", "def check_masked (self, pos : list,) :\n count = 0\n total = 0\n for x in range(pos[0],min(pos[0] + AUTO_width1, self.m_x)) :\n for y in range(pos[1], min(pos[1] + AUTO_width1, self.m_y)) :\n total += 1\n if self.current_grid[x][y] :\n count += 1\n if count/total > 0.5 :\n return True\n else :\n return False", "def check_boundedness(self):\n if SymEq.check_boundedness(self.aMatrix,\n self.bMatrix,\n self.eqMatrix,\n SymEq.get_var_list(self.raw_expression)):\n return True\n else:\n raise Exception(\"[RectangleSet ERROR]: (Initial) Set NOT Bounded.\")", "def occupied(self, (xIndex, yIndex)):\n return xIndex < 0 or yIndex < 0 or \\\n xIndex >= self.xN or yIndex >= self.yN or \\\n self.grid[xIndex][yIndex]", "def points_out_of_bounds(pc, bounds):\n oob = np.zeros(len(pc), dtype=bool)\n for comparison in _iter_points_out_of_bounds(pc, bounds):\n oob = np.logical_or(comparison, oob)\n return oob", "def within(self, x, y):\n return x >= self.top_x and x <= self.bottom_x and y >= self.bottom_y and y <= self.top_y", "def assert_good_bounds(bounds, pts):\n assert bounds.llx <= bounds.urx\n assert bounds.lly <= bounds.ury\n\n # The bounds must contain all the points.\n assert all(bounds.llx <= pt.x <= bounds.urx for pt in pts)\n assert all(bounds.lly <= pt.y <= bounds.ury for pt in pts)\n\n # Each edge of the bounds must touch at least one point.\n assert any(bounds.llx == pt.x for pt in pts)\n assert any(bounds.lly == pt.y for pt in pts)\n assert any(bounds.urx == pt.x 
for pt in pts)\n assert any(bounds.ury == pt.y for pt in pts)", "def test_inrange():\n assert cs.any > 0\n assert cs.any < cmax", "def within(self, lon: np.ndarray, lat: np.ndarray) -> np.ndarray:\n lon = normalize_longitude(lon, lon_min=self.min_corner.lon)\n lon_is_in_range = (lon >= self.min_corner.lon) | (\n lon <= self.max_corner.lon\n ) if self.max_corner.lon < self.min_corner.lon else (\n lon >= self.min_corner.lon) & (lon <= self.max_corner.lon)\n\n return (lat >= self.min_corner.lat) & (\n lat <= self.max_corner.lat) & lon_is_in_range", "def __contains__(self, item):\n if len(item) != len(self.sizes):\n raise ValueError('Point dimension does not match grid dimension')\n for i in range(len(self.sizes)):\n if not 1 <= item[i] < self.sizes[i] - 1:\n return False\n return True", "def past_limit(row, col, matrix):\n return row >= len(matrix) or col >= len(matrix) or matrix[row][col] > 0", "def _withinRangeCheckerWrapper(self, args):\n\n constraints = args['constraints']\n\n def _withinRangeChecker(entity, params):\n \"\"\"Checks if certain properties are within given constrains. \n \"\"\"\n\n for constraint in constraints:\n type = constraint.get('type')\n field = constraint.get('field')\n\n if not type or not field:\n raise ProtocolError()\n\n min_value = constraint.get('min_value', 0)\n max_value = constraint.get('max_value', 1)\n\n if type == 'size':\n value = entity.__getattribute__(field)\n if len(value) < min_value or len(value) > max_value:\n return False\n else:\n raise ProtocolError()\n \n return True\n \n return _withinRangeChecker", "def in_range_if_outside(self, nanobot):\n nearest_point_on_cube = []\n for axis in range(3):\n c = nanobot.coord[axis]\n if c < self.mins[axis]:\n nearest_point_on_cube.append(self.mins[axis])\n elif c > self.maxs[axis]:\n nearest_point_on_cube.append(self.maxs[axis])\n else:\n nearest_point_on_cube.append(c)\n\n return manhattan_dist(nearest_point_on_cube, nanobot.coord) <= nanobot.r" ]
[ "0.7919413", "0.771798", "0.7610702", "0.7590389", "0.73956734", "0.7327171", "0.72736615", "0.7215787", "0.72097933", "0.71924824", "0.7150888", "0.71367943", "0.7099973", "0.70440334", "0.6929075", "0.6919802", "0.6912774", "0.6908318", "0.6906175", "0.6897664", "0.68784577", "0.68718946", "0.6852724", "0.68370044", "0.6738841", "0.6737263", "0.67326397", "0.6729803", "0.670851", "0.6706078", "0.66966814", "0.669216", "0.6668397", "0.66517305", "0.6634259", "0.6612689", "0.65922296", "0.6584807", "0.6560157", "0.6514644", "0.6510233", "0.6498545", "0.6495669", "0.6491672", "0.647955", "0.6463784", "0.64518183", "0.64461505", "0.64440346", "0.64185494", "0.6403402", "0.63917273", "0.6390403", "0.6368096", "0.6365038", "0.63244134", "0.6304618", "0.6296161", "0.6292978", "0.6285284", "0.6285065", "0.6252952", "0.6248016", "0.62453663", "0.6233364", "0.6212643", "0.6210907", "0.6207438", "0.6197138", "0.61923987", "0.6190686", "0.61857766", "0.6182003", "0.616283", "0.61627567", "0.6159344", "0.6140007", "0.6138268", "0.6134696", "0.61163944", "0.6114628", "0.61020416", "0.6097145", "0.6089423", "0.60877115", "0.60835314", "0.6083271", "0.6082952", "0.6080115", "0.6075368", "0.6068354", "0.60626787", "0.60518736", "0.6046814", "0.60423267", "0.6025085", "0.6023725", "0.6018674", "0.60168743", "0.60107404" ]
0.74738806
4
Value for the standard deviation used to mutate the parameter
def sigma(self) -> tp.Union["Array", "Scalar"]:
    return self.parameters["sigma"]  # type: ignore
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def standard_deviation(self):\r\n\t\treturn self.variance()**(1/2)", "def _std(self, data):\n var = stats.var(data)\n if var>0.0:\n sd = math.sqrt(var)\n else:\n sd = 0.0\n return sd", "def standard_dev(self):\n return self.variance()**0.5", "def calculate_std(self) -> float:\n\n if self.data:\n return np.std(self.data)\n else:\n return self.sigma", "def std_dev(self) -> float:\n return math.sqrt(self.variance())", "def standard_deviation( values, sample=False ):\n return ma.sqrt( variance( values, sample ) )", "def std(self):\n return np.sqrt(self.var)", "def sd(vals):", "def standard_deviation(self):\n clean, total = self._prepare_for_stats()\n if not total:\n return None\n\n return math.sqrt(clean.variance())", "def std(self):\n variance, mean = self.variance()\n standard_deviation = variance**0.5\n print(f\"Standard Deviation is: {standard_deviation}\")\n return standard_deviation, mean", "def std(self):\n return np.sqrt(self.alpha) / self.beta", "def standard_deviation(data):\n\n return np.sqrt(variance(data))", "def standard_deviation(data):\n\n return np.sqrt(variance(data))", "def std(self):\r\n return np.std(self.data_array)", "def get_stdev(cls, data: tuple or list, is_population=False) -> float:\n cls._data_validation(data)\n from math import sqrt\n return sqrt(cls.get_var(data, is_population))", "def get_stdev(self):\n var_x = numpy.var(self._x)\n var_y = numpy.var(self._y)\n return numpy.sqrt(var_x + var_y)", "def get_stddev(self):\r\n for i in range(1,len(self.data[0])):\r\n self.stddev.append(np.std(self.data[:,i]))", "def std(self) -> float:\n return self._data.std()", "def standard_deviation(xs: List[float]) -> float:\n return math.sqrt(variance(xs))", "def standard_deviation(xs: List[float]) -> float:\n return math.sqrt(variance(xs))", "def std_dev(l):\n return variance(l)**.5", "def std(self):\n\t\treturn np.sqrt(0.6) #obtained by integrating 1.5x^4 from -1 to 1", "def stddev(self, sample=True):\n distance_squared = list(map(lambda x: (x - sum(self.data)/self.size)**2, self.data))\n\n if sample == True:\n variance = sum(distance_squared)/(self.size - 1)\n stddev = variance**(1/2)\n if sample == False:\n variance = sum(distance_squared)/(self.size)\n stddev = variance**(1/2)\n return stddev", "def get_stdev(self):\n if self._y.mean() == 0:\n raise ValueError('invalid value of mean of y, the ratio is not computable')\n\n var = numpy.mean(self._x ** 2) * numpy.mean(1.0 / self._y ** 2) - \\\n (numpy.mean(self._x) ** 2) * (numpy.mean(1.0 / self._y) ** 2)\n return numpy.sqrt(var)", "def std(self):\n stds = [(x.m-self.mean)**2 + x.std**2 for x in self.xs]\n return np.sqrt(np.dot(self.a, np.array(stds)))", "def std_dev(L, is_sample=0):\n\treturn math.sqrt(variance(L, is_sample))", "def sd(self, dist=None):\n return np.sqrt(self.var(dist))", "def calculate_std_dev(X):\n\tstd_dev = np.sqrt(calculate_variance(X))\n\treturn std_dev", "def std(x):\n return sqrt(TinyStatistician.var(x))", "def std(self):\n\n return self._reduce_for_stat_function(F.stddev, only_numeric=True)", "def std_mean(self):\n std = self.std\n if self.ddof != 0:\n # ddof correction, (need copy of std)\n std = std * np.sqrt(\n (self.sum_weights - self.ddof) / self.sum_weights\n )\n\n return std / np.sqrt(self.sum_weights - 1)", "def pooled_standard_deviation(input_variances):\r\n # compute and return pooled standard deviation\r\n return sqrt(mean(square([float(i) for i in input_variances])))", "def get_std(self):\n std_value = self.df[self.col_name].std()\n return std_value", "def mean_stddev(self):\n if 
len(self.vs) == 0:\n raise StdDevFilterException\n\n mx = self.mean()\n # compute variance\n variance = sum([(x - mx)**2 for x in self.vs])/len(self.vs)\n # return mean value and standard deviation (square root of variance)\n return mx,math.sqrt(variance)", "def _get_standard_deviation(intermediate_normalization_dict):\n\n num_values = float(intermediate_normalization_dict[NUM_VALUES_KEY])\n multiplier = num_values / (num_values - 1)\n\n return numpy.sqrt(multiplier * (\n intermediate_normalization_dict[MEAN_OF_SQUARES_KEY] -\n intermediate_normalization_dict[MEAN_VALUE_KEY] ** 2\n ))", "def stdev(items):\n return Series.std(Series(items))", "def em_std(self) -> float:\n if self.__total_pulls == 0:\n raise Exception('Number of pulls is 0. No empirical standard deviation.')\n return math.sqrt(\n (self.__sum_of_square_reward -\n self.__total_rewards**2 / self.__total_pulls) / self.__total_pulls)", "def std(self):\n if self.dirty:\n self._finalize()\n if self.vvar is None:\n return 0\n else:\n return math.sqrt(self.vvar)", "def deviation(values, val):\n\tm = mean(values)\n\tdev = abs(val-m)\n\tsd = standard_deviation(values)\n\treturn float(dev)/sd if sd!=0 else 0.0", "def stddev(data, ddof=0):\n n = len(data)\n if n < 2:\n return 0\n ss = _ss(data)\n pvar = ss/(n-ddof)\n return pvar**0.5", "def stdev(data, xbar=None):\n return math.sqrt(variance(data, xbar))", "def StandardDeviation(numlist):\n\tv = Variance(numlist)\n\t#print v\n\treturn math.sqrt(v)", "def global_std_deviation(self):\n\n return np.std(self.average_scores_all_subjects(), axis=0)", "def std(self):\n return self.data.std(axis=-1, keepdims=True)", "def sd(x):\n x_mean = mean(x)\n return (\n sum((x_i - x_mean) ** 2 for x_i in x) / (len(x) - 1)\n ) ** 0.5", "def stdev(values):\n mean = avg(values)\n diffs = [(value - mean) ** 2 for value in values]\n return avg(diffs) ** 0.5", "def stddev(data, ddof=0):\n n = len(data)\n if n < 2:\n raise ValueError('variance requires at least two data points')\n ss = _ss(data)\n pvar = ss/(n-ddof)\n return pvar**0.5", "def std(self, ddof=0, weight_by_area=True):\n return numpy.sqrt(self.var(ddof=ddof, weight_by_area=weight_by_area))", "def stdev_from_mean(x):\r\n x = array(x)\r\n return (x - mean(x)) / std(x)", "def test_stddev(self):\n self.assertEqual(stddev(list1, sample=False), np.std(list1))\n self.assertEqual(stddev(list1), np.std(list1, ddof=1))", "def get_std_dev(self, data):\n mean = 0\n data_arr = []\n for i in data:\n data_arr.append(i[1])\n return statistics.stdev(data_arr)", "def sd(self, v):\n return np.sqrt(np.dot(self.mat_var, v) + self.var_ext)", "def stddev_approx(self, name='stddev_approx'):\n with self._name_and_control_scope(name):\n return tf.math.sqrt(self.variance_approx())", "def get_stdev(self, recalc=False):\n if self.stdev is not None and not recalc:\n return self.stdev\n\n self.stdev = np.std(self.img.ravel())\n return self.stdev", "def standard_deviation_error(y_true, y_pred):\n ...", "def std(mean, vals):\n return sqrt(sum([(i-mean)**2 for i in vals])/len(vals))", "def calc_std(sig):\n return np.std(sig)", "def std(self):\n return self._summarize(lambda c: c.std)", "def standard_deviation(lst):\n\tnum_items = len(lst)\n\tif num_items == 0:\n\t\treturn -1\n\tmean = sum(lst) / num_items\n\tdifferences = [x - mean for x in lst]\n\tsq_differences = [d ** 2 for d in differences]\n\tssd = sum(sq_differences)\n\treturn ssd", "def stddev(r):\n avg = average(r)\n sdsq = sum([(i - avg) ** 2 for i in r])\n return (sdsq / (len(r) - 1 or 1)) ** 0.5", "def get_stdev(self):\n 
raise NotImplementedError(\"This is an abstract method and needs to be implemented in derived classes.\")", "def getSTD(self, field):\n return np.std([self.fitnesses[i][field] for i in range(len(self.fitnesses))])", "def _standardize(self):\n deviation = np.std(self.series)\n self.series = (self.series - np.mean(self.series)) / (deviation if deviation != 0 else 1)", "def get_std_dev(data, n = -1):\n mean = get_mean(data, n =n)\n\n deviations = []\n\n for i in range(0,n):\n deviations.append( (data[i] - mean)**2 )\n\n std_dev = sqrt( sum(deviations)/n )\n\n return std_dev", "def stdDev(data):\r\n sum = 0\r\n ave = average(data)\r\n for i in data:\r\n sum += (i-ave)**2\r\n return math.sqrt(sum/len(data))", "def _std_err(self):\n return np.sqrt(np.sum(np.square(self._resids), axis=-2) / self._df_err)", "def standard_deviation(list):\n num_items = len(list)\n mean = sum(list) / num_items\n differences = [x - mean for x in list]\n sq_differences = [d ** 2 for d in differences]\n ssd = sum(sq_differences)\n\n\n variance = ssd / num_items\n\n sd = sqrt(variance)\n\n return sd", "def sampleStandardDeviation(numlist):\n\tv = sampleVariance(numlist)\n\t#print v\n\treturn math.sqrt(v)", "def std(self):\n return self.std", "def rmse(self):\n lam = self.lam()\n weights = lam / lam.sum()\n weighted_var = self.var() * weights\n rmse = np.sqrt(weighted_var.sum())\n return rmse", "def calc_stdev(a, b, c, d, e):\n mean_of_num = (a + b + c + d + e) / 5\n return (((a - mean_of_num)**2 + (b - mean_of_num)**2 + (c - mean_of_num)**2\n + (d - mean_of_num)**2 + (e - mean_of_num)**2) / 5) ** 0.5", "def std(self, dset):\n avg = self.mean(dset)\n variance = sum([math.pow(x - avg, 2) for x in dset])\n std = math.sqrt(variance)\n return std", "def get_std(self):\n std = 2 * np.sqrt(np.diag(np.dot(self._Phi.T, np.dot(self._sigma_W, self._Phi))))\n return std", "def get_std(self):\r\n cov = self.to_sparse().diagonal()\r\n std = np.sqrt(cov)\r\n return pd.Series(std, index=self.data.index, name=\"STD\")", "def calc_standard_deviation(data: list) -> float:\n mean = calc_mean(data)\n acc = 0.0\n for n in data:\n acc += (n - mean) ** 2\n acc /= len(data) - 1\n return math.sqrt(acc)", "def sd(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"sd\")", "def std_ddof(self, ddof=0):\n return np.sqrt(self.var_ddof(ddof=ddof))", "def varianza(self):\n\n return np.std(self.__dist_lista)", "def get_sterr(cls, data: tuple or list, is_population=False) -> float:\n cls._data_validation(data)\n from math import sqrt\n return cls.get_stdev(data, is_population) / sqrt(cls.get_n(data))", "def calcSE(self):\n # Make sure the Variance is already calculated\n if not hasattr(self,'ZmAreaVar'):\n self.calcZmAreaVar()\n # Standard Error = Square Root of Variance\n self.SE = self.ZmAreaVar ** 0.5\n # Should I be returning the value also?\n # Or just setting the attribute?\n return self.SE", "def calcSE(self):\n # Make sure the Variance is already calculated\n if not hasattr(self,'ZmAreaVar'):\n self.calcZmAreaVar()\n # Standard Error = Square Root of Variance\n self.SE = self.ZmAreaVar ** 0.5\n # Should I be returning the value also?\n # Or just setting the attribute?\n return self.SE", "def from_stdev(cls, std):\r\n std_ = pd.Series(std)\r\n return cls.from_var(std_ * std_)", "def _calculate_std(self, lc):\n if self.std is None:\n std = np.mean(lc)**0.5\n elif isinstance(self.std, collections.Iterable):\n std = np.mean(self.std) # Iterable of numbers\n else: # Single float number\n std = self.std\n\n return std", "def 
untruncatedStdDev(self):\n return self._distribution.untrStdDev()", "def stdev(headers, data):\n\tcolumn_matrix=data.get_data(headers)\n\tmean_values=column_matrix.std(0)\n\tstd_values=mean_values.tolist()\n\treturn std_values", "def _std(self):\n\n\t\t#print opt.hess_inv.todense()\n\t\td = 1E-7\n\t\ttheta = self.theta\n\n\t\tTheta = np.copy(theta)\n\t\tTheta[0] = Theta[0] + d\n\t\taa1 = self.objfxn(tuple(Theta))\n\t\tTheta = np.copy(theta)\n\t\tTheta[0] = Theta[0] - d\n\t\taa2 = self.objfxn(tuple(Theta))\n\t\taa3 = self.objfxn(tuple(theta))\n\n\t\tself.stda = 1/np.sqrt((aa1 - 2*aa3 + aa2)/d**2)\n\n\t\tTheta = np.copy(theta)\n\t\tTheta[1] = Theta[1] + d\n\t\tbb1 = self.objfxn(tuple(Theta))\n\t\tTheta = np.copy(theta)\n\t\tTheta[1] = Theta[1] - d\n\t\tbb2 = self.objfxn(tuple(Theta))\n\t\tbb3 = self.objfxn(tuple(theta))\n\n\t\tself.stdb = 1/np.sqrt((bb1 - 2*bb3 + bb2)/d**2)\n\n\t\td = 1E-9\n\t\tTheta = np.copy(theta)\n\t\tTheta[2] = Theta[2] + d\n\t\tcc1 = self.objfxn(tuple(Theta))\n\t\tTheta = np.copy(theta)\n\t\tTheta[2] = Theta[2] - d\n\t\tcc2 = self.objfxn(tuple(Theta))\n\t\tcc3 = self.objfxn(tuple(theta))\n\n\t\tself.stdc = 1/np.sqrt((cc1 - 2*cc3 + cc2)/d**2)\n\n\t\treturn self", "def stddev(self, num_list):\n try:\n mean = self.average(num_list)\n\n minus_mean = []\n\n for number in num_list:\n try:\n minus_mean.append((number - mean) ** 2)\n except Exception as e:\n print(\"Error: \", e)\n\n meany_mean = self.average(minus_mean)\n\n meany_mean = meany_mean ** .5\n\n except Exception as e:\n print(\"Error: \", e)\n\n return meany_mean", "def test_stdev_from_mean(self):\r\n x = [2.1, 4.2, 5.9, 8.4, 9.6]\r\n result = stdev_from_mean(x)\r\n self.assertFloatEqual(\r\n result,\r\n [-1.292463399014413,\r\n -0.60358696806764478,\r\n -0.045925095396451399,\r\n 0.77416589382589174,\r\n 1.1678095686526162])", "def normalize_standard_deviation(dataset):\n return dataset*(1/np.std(dataset))", "def F_std_coeff(d):\n return (d ** 2 - 1) / 4", "def _mean_sd(p_name, builder, pysb_fit):\n p = builder.model.parameters[p_name]\n p_index = builder.model.parameters.index(p)\n p_est_index = builder.estimate_params.index(p)\n p_mean = pysb_fit.params[p_index]\n cov_x = pysb_fit.result[1]\n if cov_x is not None:\n p_sd = np.sqrt(cov_x[p_est_index, p_est_index] *\n np.var(pysb_fit.residuals))\n else:\n p_sd = np.nan\n\n return (p_mean, p_sd)", "def psd(SP):\n psd = (np.mean(SP,axis=1))\n return psd", "def _std(listvalue,ddof=1):\n\tmean=_mean(listvalue)\n\ttemp=[math.pow(i-mean,2) for i in listvalue]\n\tres=math.sqrt(sum(temp)/(len(listvalue)-ddof))\n\treturn res", "def calc_sq_std(df):\n\n sq_std = df.dropna()\n\n sq_std = (df['std'].divide(df['mean']))**2\n\n sq_std.name = 'sq_std'\n\n sq_std = pd.DataFrame(sq_std)\n\n sq_std = sq_std.dropna()\n\n return sq_std", "def std(self, axis=None, dtype=None, out=None, ddof=0, keepdims=False):\n ret = self.var(axis=axis, dtype=dtype, out=out, ddof=ddof, keepdims=keepdims)\n\n ret = np.sqrt(ret)\n return ret", "def variance(self):\n return self.sigma", "def build_std(self):\n param = self.param\n meansp = self.mean()\n stdsp = self.std()\n num_noise = 200\n noise = np.random.normal(1,0.005,(num_noise,self.wvl.size)) # add 0.5% variance to signal at all wavelengths\n # should be at every sp in utc, but for now, use mean sp\n sp_arr = meansp*noise\n #import code; code.interact(local=locals())\n par_noisy = np.array(list(map(lambda tt:param(sp_arr[tt,:],self.wvl),xrange(num_noise))))\n notaxis = tuple(np.where(par_noisy.shape != self.npar)[0])\n stdpar = 
np.nanstd(par_noisy,axis=notaxis)\n self.stdpar = stdpar\n return stdpar", "def overall_standard_deviation(individual, test_data, truth_data, name=None):\r\n test_data = np.array(test_data)\r\n truth_data = np.array(truth_data)\r\n return np.std(test_data - truth_data)", "def std(values, ave):\n return math.sqrt(float(sum((value-ave)**2 for value in values))/len(values))", "def get_mean_stddev(self):\n return self.get_mean(), self.get_std_dev()", "def GetStandardDeviation(vals_l, mean):\n\n\n sum_deviations_squared = 0\n\n for x in vals_l:\n sum_deviations_squared += (x - mean)**2\n\n return math.sqrt(float(sum_deviations_squared)/float(len(vals_l)))" ]
[ "0.8100612", "0.7911003", "0.7877927", "0.77923644", "0.7742439", "0.76735985", "0.7616291", "0.75770307", "0.753299", "0.74418736", "0.74262315", "0.74040097", "0.74040097", "0.740057", "0.7374691", "0.73520476", "0.7338328", "0.73318505", "0.733133", "0.733133", "0.73204684", "0.7237571", "0.723318", "0.7183897", "0.7161217", "0.7155431", "0.7153569", "0.7149093", "0.7133163", "0.7116844", "0.7114983", "0.7065686", "0.70557684", "0.7050234", "0.70460325", "0.7006982", "0.6983083", "0.6979509", "0.6975852", "0.6959944", "0.69566774", "0.6938858", "0.693721", "0.69269025", "0.6921679", "0.6918395", "0.6882545", "0.68749225", "0.68647414", "0.6862676", "0.685708", "0.68528676", "0.6844229", "0.6830352", "0.6817027", "0.68087596", "0.6808647", "0.67936504", "0.6792107", "0.6785623", "0.67842704", "0.67812926", "0.67732054", "0.6772996", "0.67714334", "0.6769128", "0.67667294", "0.6759841", "0.6759523", "0.6746952", "0.6744798", "0.67442626", "0.6740556", "0.6737656", "0.6734783", "0.6725417", "0.6723857", "0.6712363", "0.67093337", "0.6702605", "0.6702605", "0.66779554", "0.6667178", "0.66592723", "0.6653972", "0.6641778", "0.66285175", "0.66234934", "0.66188556", "0.6616678", "0.66159296", "0.661545", "0.6606188", "0.66015124", "0.6594971", "0.6580131", "0.6576656", "0.65761054", "0.65726495", "0.65660745", "0.6555826" ]
0.0
-1
Bounds all real values into [lower, upper] using a provided method
def set_bounds(
    self: A,
    lower: BoundValue = None,
    upper: BoundValue = None,
    method: str = "clipping",
    full_range_sampling: bool = False,
    a_min: BoundValue = None,
    a_max: BoundValue = None,
) -> A:
    # TODO improve description of methods
    lower, upper = _a_min_max_deprecation(**locals())
    bounds = tuple(a if isinstance(a, np.ndarray) or a is None else np.array([a], dtype=float) for a in (lower, upper))
    both_bounds = all(b is not None for b in bounds)
    # preliminary checks
    if self.bound_transform is not None:
        raise RuntimeError("A bounding method has already been set")
    if full_range_sampling and not both_bounds:
        raise ValueError("Cannot use full range sampling if both bounds are not set")
    checker = BoundChecker(*bounds)
    if not checker(self.value):
        raise ValueError("Current value is not within bounds, please update it first")
    if not (lower is None or upper is None):
        if (bounds[0] >= bounds[1]).any():  # type: ignore
            raise ValueError(f"Lower bounds {lower} should be strictly smaller than upper bounds {upper}")
    # update instance
    transforms = dict(clipping=trans.Clipping, arctan=trans.ArctanBound, tanh=trans.TanhBound)
    if method in transforms:
        if self.exponent is not None and method != "clipping":
            raise ValueError(f'Cannot use method "{method}" in logarithmic mode')
        self.bound_transform = transforms[method](*bounds)
    elif method == "constraint":
        self.register_cheap_constraint(checker)
    else:
        raise ValueError(f"Unknown method {method}")
    self.bounds = bounds  # type: ignore
    self.full_range_sampling = full_range_sampling
    # warn if sigma is too large for range
    if both_bounds and method != "tanh":  # tanh goes to infinity anyway
        std_bounds = tuple(self._to_reduced_space(b) for b in self.bounds)  # type: ignore
        min_dist = np.min(np.abs(std_bounds[0] - std_bounds[1]).ravel())
        if min_dist < 3.0:
            warnings.warn(f"Bounds are {min_dist} sigma away from each other at the closest, "
                          "you should aim for at least 3 for better quality.")
    return self
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def simplebounds(cls, val, lower, upper):\n if val < lower:\n val = lower\n if val > upper:\n val = upper\n return val", "def _bound(x, min_value, max_value):\n return np.maximum(min_value, np.minimum(x, max_value))", "def apply_bound(x, var_min, var_max):\n x.position = np.maximum(x.position, var_min)\n x.position = np.minimum(x.position, var_max)", "def min_values(self, lower, upper): \n if not self.lower_bounds is None:\n return self.lower_bounds\n\n minus = np.clip(self.coeffs,-math.inf,0)\n plus = np.clip(self.coeffs,0,math.inf)\n self.lower_bounds = plus.dot(lower) + minus.dot(upper) + self.const\n \n return self.lower_bounds", "def get_bounds():\n return [0.00], [1.00]", "def bounds(x, xMin, xMax):\n if (x < xMin):\n x = xMin\n elif (x > xMax):\n x = xMax\n return(x)", "def lower_bound(self) -> float:\n ...", "def process_generic(x, lb, ub):\n x = x.abs()\n if x.dtype == 'float64':\n #print('float')\n x.loc[x.apply(lambda x: not x.is_integer())] = np.nan\n x.loc[(x <= lb ) | (x > ub)] = np.nan\n\n return x", "def __normalize(self, value, lower_bound, upper_bound):\n\n min_max_diff = self.max - self.min\n bound_diff = upper_bound - lower_bound\n return (value - self.min) / min_max_diff * bound_diff + lower_bound", "def create_bound_for_scipy(lb, ub):\n lb = tuple(map(convert_inf_to_none, lb))\n ub = tuple(map(convert_inf_to_none, ub))\n return list((lb[i], ub[i]) for i in range(len(ub)))", "def upper_bound(self) -> float:\n ...", "def bounds(self) -> typing.List[float]:\n raise NotImplementedError()", "def normalizefunction(self , values):\n maxv = np.max(values)\n minv = np.min(values)\n def f(x):\n return (x - minv)/(maxv-minv)\n return f", "def SetBounds(self, p_float, p_float_1, p_float_2, p_float_3, p_float_4, p_float_5):\n ...", "def SetBounds(self, p_float, p_float_1, p_float_2, p_float_3, p_float_4, p_float_5):\n ...", "def set_in_bounds(self,obj,val):\n if not callable(val):\n bounded_val = self.crop_to_bounds(val)\n else:\n bounded_val = val\n super(Number,self).__set__(obj,bounded_val)", "def map_bound(value, in_low, in_high, out_low, out_high):\n result = None\n\n if value <= in_low:\n result = out_low\n else:\n if value >= in_high:\n result = out_high\n else:\n # http://stackoverflow.com/a/5650012/574981\n result = out_low + (\n (out_high - out_low) * (value - in_low) / (in_high - in_low)\n )\n return result", "def max_values(self, lower, upper):\n if not self.upper_bounds is None:\n return self.upper_bounds\n\n minus = np.clip(self.coeffs,-math.inf,0)\n plus = np.clip(self.coeffs,0,math.inf)\n self.upper_bounds = plus.dot(upper) + minus.dot(lower) + self.const\n \n return self.upper_bounds", "def get_bounds(self):\n return ([self.t_min] * self.dim,[self.t_max] * self.dim)", "def compute_bounds(self, space):\n bounds = np.zeros((len(space), 2))\n\n for idx, param in enumerate(space):\n\n if TYPE[param[\"type\"]] is TYPE.FLOAT or \\\n TYPE[param[\"type\"]] is TYPE.INTEGER:\n bounds[idx] = (param[\"min\"], param[\"max\"])\n\n elif TYPE[param[\"type\"]] is TYPE.DISCRETE or \\\n TYPE[param[\"type\"]] is TYPE.DISCRETE:\n bounds[idx] = (0, len(param['values']))\n\n return bounds", "def simple_bounds(child, lb, ub):\n assert len(lb) == len(ub), 'Lower and upper bounds have different #s of design variables in simple_bounds function.'\n assert len(lb) == len(child), 'Bounds and child have different #s of design variables in simple_bounds function.'\n for i in range(0, len(child), 1):\n if child[i] < lb[i]:\n child[i] = lb[i]\n\n for i in range(0, len(child), 1):\n if child[i] 
> ub[i]:\n child[i] = ub[i]\n\n return child", "def normalize_bounds(self, bounds):\n scaled_bounds = []\n scalings = []\n intercepts = []\n \n non_fixed_params = []\n \n print(self.device)\n \n for name, domain in self.bounds.items():\n # Get any fixed parmeters\n if type(domain) == int or type(domain) == float:\n # Take note\n self.fixed_parameters.append(name)\n\n # Free parameters\n elif type(domain) == tuple:\n # Bookkeeping\n self.free_parameters.append(name)\n\n # Get scaling\n lower_bound = min(domain)\n upper_bound = max(domain)\n scale = upper_bound - lower_bound\n\n # Transform to [0, 1] domain\n #scaled_bound = {'name': name, 'type': 'continuous', 'domain': (0., 1.)} #torch.adjustment required\n non_fixed_params.append(name)\n \n # Store\n #scaled_bounds.append(scaled_bound)\n scalings.append(scale)\n intercepts.append(lower_bound)\n else:\n raise ValueError(\"Domain bounds not understood\")\n \n n_hyperparams = len(non_fixed_params)\n \n scaled_bounds = cat([zeros(1,n_hyperparams, device = self.device), \n ones(1, n_hyperparams, device = self.device)], 0)\n return scaled_bounds, tensor(scalings, device = self.device, requires_grad = False), tensor(intercepts, device = self.device, requires_grad = False) #torch.adjustment required", "def get_bound(box_list):\n box_xyxy_list = []\n for box in box_list:\n box_xyxy = xywh2xyxy(box)\n box_xyxy_list.append(box_xyxy)\n\n box_xyxy_list = np.array(box_xyxy_list)\n x1max, y1max, x2max, y2max = np.amax(box_xyxy_list, axis=0)\n x1min, y1min, x2min, y2min = np.amin(box_xyxy_list, axis=0)\n\n boundbox = xyxy2xywh([x1min, y1min, x2max, y2max])\n return boundbox", "def get_bounds():\n lower_bound = 0\n upper_bound = input(\"Please enter a whole number: \")\n domain = [lower_bound, upper_bound]\n return domain", "def _initialize_bounds(problem, bounds, get_bound, set_bound):\n for constraint in problem.constraints:\n root_expr = constraint.root_expr\n expr_bounds = Interval(constraint.lower_bound, constraint.upper_bound)\n if root_expr not in bounds:\n set_bound(root_expr, expr_bounds)\n else:\n existing_bounds = get_bound(root_expr)\n new_bounds = existing_bounds.intersect(expr_bounds)\n set_bound(root_expr, new_bounds)", "def normalize_range(array, floor=0, ceil=1):\n scaler = MinMaxScaler(feature_range=(floor, ceil), copy=True)\n return scaler.fit_transform(array)", "def _autobounds(self):\n bounds = {}\n\n def check(prop, compare, extreme, val):\n opp = min if compare is max else max\n bounds.setdefault(prop, val)\n bounds[prop] = opp(compare(bounds[prop], val), extreme)\n\n def bound_check(lat_lon):\n lat, lon = lat_lon\n check('max_lat', max, 90, lat)\n check('min_lat', min, -90, lat)\n check('max_lon', max, 180, lon)\n check('min_lon', min, -180, lon)\n\n lat_lons = [lat_lon for feature in self._features.values() for\n lat_lon in feature.lat_lons]\n if not lat_lons:\n lat_lons.append(self._default_lat_lon)\n for lat_lon in lat_lons:\n bound_check(lat_lon)\n\n return bounds", "def domain_range(domain, _range=[0, 1], return_transform=False):\n\n if not return_transform:\n return interp1d([min(domain), max(domain)], [min(_range), max(_range)], bounds_error=False)\n else:\n m = interp1d([min(domain), max(domain)], [min(_range), max(_range)])\n return [float(m(v)) for v in domain] # Take float, else returns weird numpy.ndarray element", "def set_slider_bounds(self,lower,upper,inclusive_bounds=None):\n self.bounds = (lower,upper)\n\n if inclusive_bounds is not None:\n self.inclusive_bounds = inclusive_bounds\n\n epsilon = 
max(self.slider['resolution'],0.00000000001)\n\n if self.inclusive_bounds[0] is False:\n lower+=epsilon\n if self.inclusive_bounds[1] is False:\n upper-=epsilon\n self.slider.config(from_=lower,to=upper)", "def _default_values_like(old_values, value=None, upper=None):\n if value and upper:\n raise ValueError(\"Only one of `value` or `upper` may be proivded.\")\n elif value is not None:\n lower = value\n upper = value\n elif upper is not None:\n lower = 0.\n else:\n raise ValueError(\"Either `value` or `upper` must be provided.\")\n\n new_values = copy.deepcopy(old_values)\n if (isinstance(old_values, np.ndarray) and (old_values.ndim == 1\n or 1 in old_values.shape)):\n new_values[:] = np.linspace(lower, upper, len(new_values))\n else:\n for new_array in new_values:\n new_array[:] = np.linspace(lower, upper, len(new_array))\n\n return new_values", "def bound(self, points):\n if self.bounds is None:\n return points\n\n bounded = np.copy(points) # SLOW!!!\n # Because numpy doesn't handle multi-dimensional arrays the same as 1-dimensional ones, it's easiest to just make it always look like a multi-dim array\n points_shape = points.shape\n cPoints = points_shape[0]\n if cPoints == 1:\n bounded = np.array([bounded, np.zeros_like(bounded)])\n _ignore, cDims = bounded.shape\n for i in xrange(cDims):\n xs = bounded[:,i]\n min_b, max_b = self.bounds[i]\n assert min_b < max_b\n width = max_b - min_b\n # (EDIT: Wrong!) Because of the way that mod works (it wraps negative values around, rather than returning -(abs(x) % abs(y))), we can just use it straight\n # Need to treat neg and pos values different because of behavior of mod operator\n # On second thought, don't use mod, just assume small jumps\n too_far_neg_ixs = xs < min_b\n xs[too_far_neg_ixs] += width\n too_far_pos_ixs = xs > max_b\n xs[too_far_pos_ixs] -= width\n bounded[:,i] = xs # is this necessary? 
seems so\n if cPoints == 1:\n bounded = bounded[0] # pull back out the 1-dim array\n return bounded", "def map_to_range(val, old_min, old_max, new_min, new_max):\n return new_max - (val - old_min) * (new_max - new_min) / (old_max - old_min)", "def between(min, max):\n def func(x):\n return min <= x <= max\n return func", "def bound(self, lower: Bound, upper: Bound) -> BoundedType:\n return BoundedType(\n self, BoundedType.convert_bound(lower), BoundedType.convert_bound(upper)\n )", "def all_bucket_boundaries(self):\n\n lower = self._lower_bounds[0]\n for i in xrange(1, self.total_buckets):\n upper = self._lower_bounds[i]\n yield (lower, upper)\n lower = upper\n\n yield (lower, float('Inf'))", "def __init__(self,\n low,\n high,\n clipping_lower_bound=-np.inf,\n clipping_upper_bound=np.inf):\n super().__init__()\n self._low = low\n self._high = high\n self._clipping_lower_bound = clipping_lower_bound\n self._clipping_upper_bound = clipping_upper_bound", "def margulis_bound(self, normalized=False):\n if normalized:\n return 5 * np.sqrt(2) / 8\n else:\n return 5 * np.sqrt(2)", "def myround(value, lowerbound, higherbound):\n if value < lowerbound:\n return lowerbound\n if value > higherbound:\n return higherbound\n return value", "def clamp(lower, value, upper):\n if lower > value:\n return lower\n if upper < value:\n return upper\n return value", "def bounds(self):\n return self.xmin, self.xmax, self.ymin, self.ymax", "def rvsWithinbounds(self,lowerBound,upperBound):\n CDFupper = self._distribution.cdf(upperBound)\n CDFlower = self._distribution.cdf(lowerBound)\n randResult = self.rvsWithinCDFbounds(CDFlower,CDFupper)\n return randResult", "def bounds(self): # -> tuple[()]:\n ...", "def _manage_infinity_bounds(problem, _bounds, get_bound, set_bound):\n for variable in problem.variables:\n expr_bounds = get_bound(variable)\n lower_bound = expr_bounds.lower_bound\n upper_bound = expr_bounds.upper_bound\n\n if is_inf(lower_bound):\n new_lower_bound = None\n else:\n new_lower_bound = lower_bound\n\n if is_inf(upper_bound):\n new_upper_bound = None\n else:\n new_upper_bound = upper_bound\n\n set_bound(variable, Interval(new_lower_bound, new_upper_bound))", "def _setBound(self, value):\n if self._colormap is not None:\n if self._index == 0:\n min_ = value\n max_ = self._colormap.getVMax()\n else: # self._index == 1\n min_ = self._colormap.getVMin()\n max_ = value\n\n if max_ is not None and min_ is not None and min_ > max_:\n min_, max_ = max_, min_\n self._colormap.setVRange(min_, max_)", "def clip(x, min, max):\r\n # see decorator for function body\r\n # for grep: clamp, bound\r", "def midrange(lo, hi, mid=0, scale=1.0):\n return [min(mid, (mid + lo) / (1.0 + scale)),\n max(mid, (mid + hi) / (1.0 + scale))]", "def _check(self, vector):\n\n for i, elmt in enumerate(vector):\n\n # checks lower bound\n if (elmt < self.lower[i]):\n vector[i] = self.lower[i]\n\n # checks upper bound\n elif (elmt > self.upper[i]):\n vector[i] = self.upper[i]\n\n return vector", "def apply_bounds(self, column_name, lower_bound=-np.inf,\n upper_bound=np.inf):\n self.check_for_column(column_name)\n\n if lower_bound is None:\n lower_bound = -np.inf\n if upper_bound is None:\n upper_bound = np.inf\n column = self.data[column_name]\n self.data[column_name] = column.clip(lower_bound, upper_bound)", "def _convert_bound(value, lower_bound, upper_bound):\n # Converts value to 16 bit two's complement integer via bitwise.\n most_sig_bit = 0x8000\n\n # Gets the two least significant bits\n convert_val = value & _BYTE << _BYTE_SIZE | 
value & _BYTE\n # Extends the most significant bit if it is a 1. This is done by\n # carrying out the most significant bit.\n if bool(convert_val & most_sig_bit):\n convert_val |= ~(_BYTE << _BYTE_SIZE | _BYTE)\n\n # Bounds the converted value\n if convert_val > upper_bound:\n return upper_bound\n elif convert_val < lower_bound:\n return lower_bound\n return convert_val", "def process_pain(x, lb, ub):\n x = x.abs()\n x.loc[(x > ub)] = 8\n x.loc[(x < lb) | (x > ub)] = np.nan\n return x", "def map_bound(self, func):\n\n def iter_all():\n for x in self:\n yield from func(x)\n\n return List(iter_all())", "def GetBounds(self):\n ...", "def GetBounds(self):\n ...", "def GetBounds(self):\n ...", "def GetBounds(self):\n ...", "def GetBounds(self):\n ...", "def GetBounds(self):\n ...", "def GetBounds(self):\n ...", "def GetBounds(self):\n ...", "def GetScalarRange(self):\n ...", "def scale_range(data, minTo, maxTo):\n minFrom = np.min(data)\n maxFrom = np.max(data)\n \n scaled_data = []\n \n for point in data:\n new_point = minTo + (maxTo - minTo) * ((point - minFrom)/(maxFrom - minFrom))\n scaled_data.append(new_point)\n \n return scaled_data", "def mapRange(num, min1, max1, min2, max2, clamp=True):\n if(clamp and num < min1):\n return min2\n if(clamp and num > max1):\n return max2\n\n num1 = (num - min1) / (max1 - min1)\n num2 = (num1 * (max2 - min2)) + min2\n return num2", "def constrain(inputVal, lower_limit, upper_limit):\n \n if (inputVal < lower_limit):\n return lower_limit\n elif (inputVal > upper_limit):\n return upper_limit\n else:\n return inputVal", "def external2internal(xe,bounds):\n\n xi = np.empty_like(xe)\n\n for i,(v,bound) in enumerate(zip(xe,bounds)):\n \n a = bound[0] # minimum\n b = bound[1] # maximum\n\n if a == None and b == None: # No constraints\n xi[i] = v\n\n elif b == None: # only min\n xi[i] = np.sqrt( (v-a+1.)**2.-1 )\n\n elif a == None: # only max\n xi[i] = np.sqrt( (b-v+1.)**2.-1 )\n\n else: # both min and max\n xi[i] = np.arcsin( (2.*(v-a)/(b-a))-1.)\n\n return xi", "def view_limits(self, dmin, dmax):\n base = self._select_base(dmin, dmax)\n if mpl.rcParams['axes.autolimit_mode'] == 'round_numbers':\n vmin = base.le(dmin)\n vmax = base.ge(dmax)\n if vmin == vmax:\n vmin -= 1\n vmax += 1\n else:\n vmin = dmin\n vmax = dmax\n\n return mtransforms.nonsingular(vmin, vmax)", "def bounds(self) -> Tensor:\n return torch.cat([self.mins, self.mins + self.ranges], dim=-2)", "def math_map_list(values, toMin=0, toMax=1):\n minValue = min(values)\n maxValue = max(values)\n delta = maxValue - minValue\n deltaTarget = toMax - toMin\n newValues = [toMin +(value-minValue)*deltaTarget/delta for value in values]\n return newValues", "def _clamp_into_bounds(self, positions):\n below_lower_bounds = positions < self.lower_bound\n above_upper_bounds = positions > self.upper_bound\n within_bounds = ~np.logical_or(below_lower_bounds, above_upper_bounds)\n bound_mask = below_lower_bounds * self.lower_bound + above_upper_bounds * self.upper_bound\n return np.where(within_bounds, positions, bound_mask)", "def denormalize_bounds(self, normalized_arguments):\n denormalized_bounds = (normalized_arguments * self.bound_scalings) + self.bound_intercepts\n return denormalized_bounds", "def normalize(x, lower=-1, upper=1):\n x_norm = (upper - lower)*((x - np.min(x)) / (np.max(x) - np.min(x))) + lower\n return x_norm", "def linear_space(min_value=0, max_value=1.0, length=10, round_op=None):\n\n out = []\n value = min_value\n length = max(2, length)\n delta = (float(max_value) - float(min_value)) / 
float(length - 1.0)\n\n for index in range(length - 1):\n out.append(round_op(value) if round_op else value)\n value += delta\n\n out.append(round_op(max_value) if round_op else max_value)\n return out", "def rescale_to_range(\n array: vtk.vtkDoubleArray,\n to_range: typing.Tuple[float, float],\n rel_tol: float = sys.float_info.epsilon,\n abs_tol: float = sys.float_info.epsilon,\n) -> vtk.vtkDoubleArray:\n to_span = to_range[1] - to_range[0]\n assert to_span >= 0\n\n # The values need to span a positive range to be able to scale to `to_range`.\n # We use at least a small span derived from the tolerances.\n array_range = array.GetValueRange()\n array_span = array_range[1] - array_range[0]\n array_center = array_range[0] + array_span / 2\n from_range = (\n array_range\n if not math.isclose(array_span, 0.0, rel_tol=rel_tol, abs_tol=abs_tol)\n else (\n array_center - max(rel_tol * abs(array_center), abs_tol),\n array_center + max(rel_tol * abs(array_center), abs_tol),\n )\n )\n from_span = from_range[1] - from_range[0]\n\n assert not math.isclose(from_span, 0.0, rel_tol=rel_tol, abs_tol=abs_tol)\n factor = to_span / from_span\n\n result = vtk.vtkDoubleArray()\n result.SetNumberOfValues(array.GetNumberOfValues())\n for id in range(array.GetNumberOfValues()):\n result.InsertValue(\n id, to_range[0] + (array.GetValue(id) - from_range[0]) * factor\n )\n\n return result", "def _get_bounds(x, y, size):\n x = np.array(np.atleast_1d(x))\n y = np.array(np.atleast_1d(y))\n\n lower_x = np.rint(x - size[0]/2)\n lower_y = np.rint(y - size[1]/2)\n\n return np.stack((np.stack((lower_x, lower_x + size[0]), axis=1),\n np.stack((lower_y, lower_y + size[1]), axis=1)), axis=1).astype(int)", "def _mask_for_values_between_exponent_limits(self):\n mask_between_exp_limits = None\n new_exp_lower = new_exp_upper = None\n if self.exp_lower is not None:\n new_exp_lower = self.exp_lower + 1\n if self.exp_upper is not None:\n new_exp_upper = self.exp_upper - 1\n if (new_exp_lower is not None) and (new_exp_upper is not None):\n if new_exp_lower <= new_exp_upper:\n mask_between_exp_limits = self.exponent.interval_mask(new_exp_lower, new_exp_upper)\n else:\n mask_between_exp_limits = self.exponent.interval_mask(new_exp_lower, new_exp_upper)\n\n return mask_between_exp_limits", "def test_calculate_crow_bounds_scale_parameter_type1(self):\n\n _bounds = calculate_crow_bounds(22, 620.0, 0.4239, 0.6142, 0.9, 2, 1)\n self.assertAlmostEqual(_bounds[0], 0.2870230)\n self.assertAlmostEqual(_bounds[1], 0.6279656)", "def bcRange(self):\n\t\treturn fabs(self.Upper - self.Lower)", "def view_limits(self, vmin, vmax):\n return vmin, vmax\n # return nonsingular(vmin, vmax)", "def limit_by(self, field, lower_bound=None, upper_bound=None):\n if lower_bound is not None and upper_bound is not None:\n indices = (self[field] >= lower_bound) \\\n & (self[field] <= upper_bound)\n elif lower_bound is None:\n indices = self[field] <= upper_bound\n elif upper_bound is None:\n indices = self[field] >= lower_bound\n else:\n raise ValueError(\"One bound must be set!\")\n return self[indices]", "def fit_to_range(val: float, a: float, b: float, a1: float, b1: float) -> float:\n new_value = ((val - a) / (b - a)) * (b1 - a1) + a1\n return new_value", "def fill_bounds(x, upper, lower, axis, c = 'k', alpha = 0.2):\n axis.fill_between(\n x, \n upper[:, 0], \n lower[:, 0], \n facecolor = c,\n alpha = alpha\n )", "def _process_individual_bound(self, val):\n if(val not in [True, False]):\n raise ValueError('For composition bounds expected are iether True' \n '(free 
function) or False (fixed function) not %s' % (str(val)))\n return val", "def merge_bounds(ibounds):\n # Coerce Nones to Infs\n all_bounds = [InfBounds(*bounds) for bounds in ibounds]\n \n # Extract mins/maxs of axes\n all_bounds = np.array(all_bounds)\n return Bounds(all_bounds[:,0].min(), all_bounds[:,1].min(), all_bounds[:,2].min(),\n all_bounds[:,3].max(), all_bounds[:,4].max(), all_bounds[:,5].max())", "def bounds(self, pos):", "def calcBRange(c,n=10):\n \n bMin = -abs(c)/2.0 \n bMax = abs(c)/2.0 \n return np.linspace(bMin,bMax,n)", "def Range(self, from: int, to: int) -> BaseVector:", "def initializeDistribution(self):\n self.minVal = min(math.exp(self.upperBound),math.exp(self.lowerBound))\n self.maxVal = max(math.exp(self.upperBound),math.exp(self.lowerBound))", "def integrate_range(self, lower, upper):\n if upper>self.upper:\n upper=self.upper\n if lower<self.lower:\n lower = self.lower\n\n i_l = int(np.floor((lower-self.lower)/self._dx))\n i_u = int(np.floor((upper-self.lower)/self._dx))\n #print \"i_l \",i_l,\" i_u \",i_u\n total = 0.0\n for i in range(i_l,i_u):\n total+= self.y[i]*self._dx\n return total", "def cpfclamp(f, min_, max_):\n return min(max(f, min_), max_)", "def rangify(v, lb, ub):\n if lb >= ub:\n lb, ub = ub, lb\n return max(min(v, ub), lb)", "def _process_individual_bound(self, val):\n if(val == True):\n res = self._DEF_BOUNDS\n \n elif(val in [False, None]):\n res = val\n else:\n if(len(val) != 2):\n raise ValueError('Bound value is not recognized. '% (str(val)))\n if(val[0] > val[1]):\n raise ValueError('Bound values are inverted '% (str(val)))\n res = val\n return res", "def test_calculate_crow_bounds_scale_parameter_type2(self):\n\n _bounds = calculate_crow_bounds(22, 620.0, 0.4239, 0.6142, 0.9, 2, 2)\n self.assertAlmostEqual(_bounds[0], 0.2870230)\n self.assertAlmostEqual(_bounds[1], 0.5827754)", "def set_lim(values, scale):\n\n v_min, v_max = min(values), max(values)\n margin = (v_max - v_min) * scale\n v_min, v_max = v_min - margin, v_max + margin\n\n return v_min, v_max", "def range(self) -> ty.Tuple[float, float]:\r\n ...", "def condition_bounds(self) -> Tuple[float, float]:\n raise NotImplementedError", "def bounds(self) -> Box:\n raise NotImplementedError()", "def get_bounds():\n bounds = [\n (0.1, 0.5), # Omega_m\n (0.05, 0.15) # beta\n ]\n return np.array(bounds)", "def get_bounds(self):\n raise Exception(\"Non-implemented base class method.\")", "def GetBounds(self, p_int, p_int_1, p_float=..., p_float=..., p_float=..., p_float=..., p_float=..., p_float=...):\n ...", "def GetNiceExtentsBySpacing(minval,maxval,spacing,tolerance):\n pass", "def scale_1d(x):\n return (min(x), max(x), len(x))" ]
[ "0.71283054", "0.68890387", "0.65130097", "0.6450955", "0.6424436", "0.63653696", "0.63593006", "0.6322421", "0.62083405", "0.62054557", "0.6173497", "0.60966706", "0.60829145", "0.607084", "0.607084", "0.6065957", "0.59931934", "0.59931576", "0.5980716", "0.5976944", "0.5952933", "0.59390056", "0.5938644", "0.59251064", "0.59105074", "0.5901531", "0.5863563", "0.5862887", "0.586234", "0.58564186", "0.58522654", "0.5837468", "0.5835325", "0.581516", "0.5803356", "0.579754", "0.57741046", "0.57720655", "0.57579184", "0.5755867", "0.575411", "0.57529944", "0.57529885", "0.57422775", "0.57294345", "0.57244694", "0.5724442", "0.57223237", "0.57028717", "0.57020676", "0.5701765", "0.57013726", "0.57013726", "0.57013726", "0.57013726", "0.57013726", "0.57013726", "0.57013726", "0.57013726", "0.5700492", "0.56994164", "0.568886", "0.5671197", "0.5670546", "0.566861", "0.56628746", "0.5655141", "0.56496936", "0.56481785", "0.56385726", "0.56305146", "0.56269646", "0.5626468", "0.561064", "0.56071335", "0.5605811", "0.5599743", "0.5597283", "0.55964035", "0.5589498", "0.5574467", "0.55620503", "0.55551124", "0.55507046", "0.5547866", "0.5544103", "0.5536198", "0.55329055", "0.55315673", "0.55311954", "0.55172086", "0.5514323", "0.5513582", "0.5511127", "0.55094516", "0.5508412", "0.55076355", "0.5504136", "0.5500703", "0.54982316" ]
0.71421045
0
Output will be cast to integer(s) through deterministic rounding.
def set_mutation(self: A, sigma: tp.Optional[tp.Union[float, "Array"]] = None, exponent: tp.Optional[float] = None) -> A:
    if sigma is not None:
        # just replace if an actual Parameter is provided as sigma, else update value (parametrized or not)
        if isinstance(sigma, core.Parameter) or isinstance(self.parameters._content["sigma"], core.Constant):
            self.parameters._content["sigma"] = core.as_parameter(sigma)
        else:
            self.sigma.value = sigma  # type: ignore
    if exponent is not None:
        if self.bound_transform is not None and not isinstance(self.bound_transform, trans.Clipping):
            raise RuntimeError(f"Cannot set logarithmic transform with bounding transform {self.bound_transform}, "
                               "only clipping and constraint bounding methods can accept it.")
        if exponent <= 1.0:
            raise ValueError("Only exponents strictly higher than 1.0 are allowed")
        if np.min(self._value.ravel()) <= 0:
            raise RuntimeError("Cannot convert to logarithmic mode with current non-positive value, please update it first.")
        self.exponent = exponent
    return self
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def intify(x):\n return int(x) if almost_equal(x, round(x)) else x", "def ir(some_value):\r\n return int(round(some_value))", "def rint(a: Number) -> int:\n return np.round(a).astype(int)", "def _float2int(x: float) -> int:\n return round(x * 100)", "def __int__(self) -> int:\n\n return int(self.__float__())", "def int_r(f):\n return int(np.round(f))", "def distance_to_int(mi):\n return round(change_precision(mi, 2))", "def as_int(self) -> int:\n return int(self.to_ot2_equivalent().value)", "def as_int(self):\n number = 0\n n = 1\n for i in reversed(self.qubit_values):\n number += n*i\n n = n << 1\n return number", "def __int__(self) -> int:\n # If denominator is 1:\n if self.denom:\n return (-self.numer_prod() if self.neg\n else self.numer_prod())\n else:\n return int(self.__float__())", "def round_repeats(repeats):\n return int(repeats)", "def round_repeats(repeats):\n return int(repeats)", "def base2int(self, float_number):\r\n return int(round(float_number * self.mult_base))", "def _decimal_to_int64(decimal: Decimal) -> int:\n return int(f\"{decimal:0f}\".replace(\".\", \"\"))", "def disc(x):\n return int(round(x))", "def to_int(self) -> int:\n return self.as_int", "def itemsToInt(self):\n returnvalue = Matrix()\n for row in self._value:\n newRow = list()\n for item in row:\n # round the item to 3 decimal places before converting,\n # so floats like 1.999999964 become 2, not 1\n newRow.append(int(round(item, 3)))\n returnvalue.addRow(*newRow)\n return returnvalue", "def roundUP(x):\n\treturn int(ceil(x / 10.0)) * 10", "def __int__( self ):\r\n\t\treturnvalue = self.numerator / self.denominator\r\n\t\tif ( type( returnvalue ) == types.ComplexType ):\r\n\t\t\treturnvalue = int( abs( returnvalue ) )\r\n\t\telse:\r\n\t\t\treturnvalue = int( returnvalue )\r\n\t\treturn returnvalue", "def _to_int(self, num):\n assert isinstance(num, Number), 'Is not number in _to_int'\n return floor(self.__tick_to_unit_time * num)", "def INT(val):\n return math.floor(val)", "def xx_int(self):\n return np.int32(np.round(self.xx))", "def _nint(x):\n\n return int(x + 0.5)", "def toint(number):\n if isinstance(number, float):\n if number > 1:\n number = round(number, 0)\n else:\n # The following solves when image has small dimensions (like 1x54)\n # then scale factor 1 * 0.296296 and `number` will store `0`\n # that will later raise ZeroDivisionError.\n number = round(math.ceil(number), 0)\n return int(number)", "def __int__(self):\n return int(self.micros() // 1000000)", "def round(x):\n return int(x + copysign(0.5, x))", "def get_round() -> int:\n return store.round", "def roundMultiple(x, base=4):\n return int(base * round(float(x)/base))", "def __int__(self) -> int:\n return self._translate_in_type(int, self.float_num)", "def roundup_int(x, m):\n\treturn int(math.ceil(x / float(m))) * m", "def quote2int(self, float_number):\r\n return int(round(float_number * self.mult_quote))", "def whole_number_to_int(value: AnyBasicType) -> AnyBasicType:\n if isinstance(value, float) and value.is_integer():\n return int(value)\n return value", "def iceil(x):\n return np.ceil(x).astype(np.int)", "def intround(value):\n\n return int(decimal.Decimal.from_float(\n value).to_integral_value(decimal.ROUND_HALF_EVEN))", "def iround(x):\n return int(round(x) - .5) + (x > 0)", "def roundInt(value, places):\n\n if places == 0:\n value = int(round(value, 0))\n else:\n value = round(value, places)\n return value", "def __float__(self) -> float:\n return self._translate_in_type(float, self.integer)", "def round(data):\n return 
_make.round(data)", "def __int__(self):\n return self.get_raw_int()", "def as_int(self):\n return self.as_type(int)", "def convert_ints_to_floats(in_ints, divider):\n return (in_ints.astype(numpy.float64) / divider)", "def round4_decimal(d):\n return int(round4_float(d))", "def rint(flt: float) -> int | float:\n return int(rounded) if (rounded := round(flt, 2)).is_integer() else rounded", "def _convert_int(self) -> pd.Series:\n\n if self.requires_nan:\n dtype = \"float\"\n else:\n dtype = \"int\"\n\n return self._convert(dtype=dtype)", "def nearest_int(num):\n return int(np.round(num))", "def round_int(dec):\n\n return int(decimal.Decimal(dec).quantize(decimal.Decimal('0'), decimal.ROUND_HALF_UP))", "def float_to_int_64(x):\n return np.float64(x).view(np.int64)", "def to_int(self, input):\n result = 0\n potence = 0\n\n while (len(input) > 0):\n result += self.decimal_value_of(input[-1]) * \\\n pow(self.alphabet_len, potence)\n input = input[0:-1]\n potence += 1\n\n return result", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def py3round(number):\n if abs(round(number) - number) == 0.5:\n return int(2.0 * round(number / 2.0))\n\n return int(round(number))", "def convert_integer_price(decimal_price):\n return int(float(decimal_price) * 100)", "def to_integer(dict_):\n for key in dict_.keys():\n if isinstance(dict_[key], float):\n dict_[key] = int(dict_[key])\n return dict_", "def _irep_to_value(self,n,i):\n if i == 1:\n j,k = divmod(n,9)\n v = (k+1)*10**j\n return v\n else:\n j,k = divmod(n,int(10.0/i))\n if k == 0:\n v = 10**j\n else:\n v = i*k*10**j\n return v", "def integerize(self):\n self.x = int(self.x)\n self.y = int(self.y)", "def integerize(self):\n self.x = int(self.x)\n self.y = int(self.y)", "def conv_i_to_dec(i):\n return 1800 + i*10", "def __int__(self) -> int:\n integer_number = self.numerator_a // self.denominator_b\n print(f'Fraction {self.fraction} integer number is {integer_number}')\n return integer_number", "def round_down(x):\n return int(math.floor(x / 10.0)) * 10", "def _coerce_to_integer(value):\n try:\n return int(value)\n except ValueError:\n return int(float(value))", "def lat_to_int(lat):\n lat = int((Decimal(lat) * 10000000).quantize(Decimal('1'), 
rounding=ROUND_HALF_UP))\n return min(900000000, max(-900000000, lat))", "def __round__(self, ???):", "def to_int(self):\n\n if not self.is_structural():\n\n return -1\n\n value = gs.all_elements.index(self.path[1]) * 100\n\n value += (self.depth - 2) * 10\n\n return value", "def integer(self, interval=None, count=1):\n results = []\n for _ in range(count):\n generated = self.float01()\n if interval is not None:\n results.append(\n int((interval[1] - interval[0] + 1) * generated + interval[0]))\n else:\n if generated < 0.50:\n results.append(0)\n else:\n results.append(1)\n if count == 1:\n return results.pop()\n return results", "def to_int(toks):\n return int(toks[0])", "def __float__(self):\n\t\toutput = 0.0\n\n\t\tfor index,value in enumerate(self):\n\t\t\tif value > 0:\n\t\t\t\toutput+=float(value) * (10 ** -index)\n\n\t\treturn output", "def lon_to_int(lon):\n lon = int((Decimal(lon) * 10000000).quantize(Decimal('1'), rounding=ROUND_HALF_UP))\n return (lon + 1800000000) % 3600000000 - 1800000000", "def wl_to_int(w: float) -> int:\n return int(np.floo(w * 100))", "def int_format(self):\n ...", "def float2int(img_float):\n img = img_float * (MAX_VALUE - 1)\n img = img.astype(int)\n return img", "def getInteger(self):", "def getInteger(self):", "def int_to_python(self, value):\r\n return int(value)", "def convert2int(img):\n if img.min() == 0:\n return img\n if img.dtype == \"uint8\":\n return img - 2**8 / 2\n elif img.dtype == \"uint16\":\n return img - 2**16 / 2\n elif img.dtype == \"uint32\":\n return img - 2**32 / 2\n else:\n return img", "def __make_numerator_integer(self):\n while self.numerator % 1 !=0:\n self.denominator *=10\n self.numerator *=10", "def floor(n: float) -> int:\n return int(n // 1)", "def to_number(self):\n return self._to_number", "def int2dec(n: int) -> str:", "def dec2int(r: str) -> int:", "def get_int2(self):\n pass", "def _int2float(x: int) -> float:\n return round(x / 100, 2)", "def my_round(x, base=10):\n return base * round(x / base)", "def _round_bits(n: int, radix_bits: int) -> int:\n return (n + radix_bits - 1) // radix_bits", "def convert(num_lst):\r\n dec = 0\r\n for i in range(0, len(num_lst)):\r\n print(\"position right to left is >\", i+1,\r\n \"value is \", BASE[(num_lst[i])],\r\n \"decimal value is\",\r\n (36**i) * BASE[(num_lst[i])])\r\n dec += (36**i) * BASE[(num_lst[i])]\r\n return dec", "def numerize():\n pass", "def safeIntCast(vec, tol=1e-3):\n vec = np.array(vec)\n vec_rounded = vec.round()\n\n if np.any(np.abs(vec - vec_rounded) > tol):\n raise ValueError('Rounding too large', np.abs(vec - vec_rounded))\n\n return tuple(vec_rounded.astype(np.int8))", "def toInt(self):\r\n rgb = self.toHex()[1:]\r\n return int(float.fromhex(rgb))" ]
[ "0.6755552", "0.671412", "0.66324544", "0.65836495", "0.64938366", "0.64298236", "0.63975763", "0.62805474", "0.619245", "0.616559", "0.6150345", "0.6150345", "0.61298555", "0.6088794", "0.6077856", "0.6072032", "0.6043717", "0.6036559", "0.5983136", "0.5977387", "0.59487563", "0.58904886", "0.5849658", "0.5849593", "0.58420706", "0.5839702", "0.58198625", "0.580894", "0.5801359", "0.5781069", "0.5776656", "0.57592255", "0.5756623", "0.5737623", "0.57359874", "0.5729242", "0.5702788", "0.5695212", "0.569045", "0.5687136", "0.5682122", "0.56769437", "0.56761354", "0.56681114", "0.56593126", "0.5653458", "0.5652457", "0.5651417", "0.5648559", "0.5648559", "0.5648559", "0.5648559", "0.5648559", "0.5648559", "0.5648559", "0.5648559", "0.5648559", "0.5648559", "0.5648559", "0.5648559", "0.5648559", "0.5648559", "0.5648559", "0.5648559", "0.5646449", "0.5628563", "0.56079274", "0.56065303", "0.559428", "0.559428", "0.55840796", "0.5561082", "0.5556269", "0.5545503", "0.55235714", "0.55054903", "0.55033654", "0.5466465", "0.54635364", "0.5448281", "0.54399914", "0.543749", "0.54357064", "0.543504", "0.5430215", "0.5430215", "0.54299295", "0.54229677", "0.5414537", "0.541391", "0.54077715", "0.53869134", "0.53822994", "0.53755593", "0.5374812", "0.5374592", "0.5364873", "0.53616047", "0.5359518", "0.53502196", "0.5336234" ]
0.0
-1
Output will be cast to integer(s) through deterministic rounding. Returns self
def set_integer_casting(self: A) -> A:
    self.integer = True
    return self
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __int__(self) -> int:\n\n return int(self.__float__())", "def itemsToInt(self):\n returnvalue = Matrix()\n for row in self._value:\n newRow = list()\n for item in row:\n # round the item to 3 decimal places before converting,\n # so floats like 1.999999964 become 2, not 1\n newRow.append(int(round(item, 3)))\n returnvalue.addRow(*newRow)\n return returnvalue", "def __int__(self) -> int:\n # If denominator is 1:\n if self.denom:\n return (-self.numer_prod() if self.neg\n else self.numer_prod())\n else:\n return int(self.__float__())", "def __int__( self ):\r\n\t\treturnvalue = self.numerator / self.denominator\r\n\t\tif ( type( returnvalue ) == types.ComplexType ):\r\n\t\t\treturnvalue = int( abs( returnvalue ) )\r\n\t\telse:\r\n\t\t\treturnvalue = int( returnvalue )\r\n\t\treturn returnvalue", "def to_int(self) -> int:\n return self.as_int", "def as_int(self) -> int:\n return int(self.to_ot2_equivalent().value)", "def to_representation(self, instance):\n return instance.to_number", "def as_int(self):\n number = 0\n n = 1\n for i in reversed(self.qubit_values):\n number += n*i\n n = n << 1\n return number", "def __int__(self):\n return self.get_raw_int()", "def to_number(self):\n return self._to_number", "def __float__(self):\n\t\toutput = 0.0\n\n\t\tfor index,value in enumerate(self):\n\t\t\tif value > 0:\n\t\t\t\toutput+=float(value) * (10 ** -index)\n\n\t\treturn output", "def integerize(self):\n self.x = int(self.x)\n self.y = int(self.y)", "def integerize(self):\n self.x = int(self.x)\n self.y = int(self.y)", "def __float__(self) -> float:\n return self._translate_in_type(float, self.integer)", "def __int__(self) -> int:\n integer_number = self.numerator_a // self.denominator_b\n print(f'Fraction {self.fraction} integer number is {integer_number}')\n return integer_number", "def __float__(self):\n return float(self.number)", "def __int__(self) -> int:\n return self._translate_in_type(int, self.float_num)", "def round(self):\n return self._round", "def __int__(self):\n return int(self.micros() // 1000000)", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def as_integer_ratio(self): # real signature unknown; restored from __doc__\n pass", "def value(self):\n return super().value() / self._precision", "def __int__(self) -> int:\n\n return self.centi", "def 
to_int(self):\n\n if not self.is_structural():\n\n return -1\n\n value = gs.all_elements.index(self.path[1]) * 100\n\n value += (self.depth - 2) * 10\n\n return value", "def _get_number(self):\n return Decimal(str(self)[1:])", "def __round__(self, ???):", "def round_repeats(repeats):\n return int(repeats)", "def round_repeats(repeats):\n return int(repeats)", "def as_int(self):\n return self.as_type(int)", "def __make_numerator_integer(self):\n while self.numerator % 1 !=0:\n self.denominator *=10\n self.numerator *=10", "def getInteger(self):", "def getInteger(self):", "def __int__(self):\n\n return self.value", "def __int__(self):\n return int(str(self),2)", "def ir(some_value):\r\n return int(round(some_value))", "def roundItems(self, digits=0):\n returnvalue = Matrix()\n for row in self._value:\n newRow = list()\n for item in row:\n item = round(item, digits)\n if (digits <= 0):\n item = int(item)\n newRow.append(item)\n returnvalue.addRow(*newRow)\n return returnvalue", "def __make_denominator_integer(self):\n while self.denominator % 1 !=0:\n self.denominator *=10\n self.numerator *=10", "def xx_int(self):\n return np.int32(np.round(self.xx))", "def _convert_int(self) -> pd.Series:\n\n if self.requires_nan:\n dtype = \"float\"\n else:\n dtype = \"int\"\n\n return self._convert(dtype=dtype)", "def apy(self) -> Decimal:\n apy = (1 + self.interest / self.n_periods) ** self.n_periods - 1\n return self._quantize(apy * 100)", "def rint(a: Number) -> int:\n return np.round(a).astype(int)", "def get_round() -> int:\n return store.round", "def getint(self, fraction) -> int:\n self.numerator_a = fraction.numerator_a\n self.denominator_b = fraction.denominator_b\n self.fraction = str(self.numerator_a) + '/' + str(self.denominator_b)\n return super().__int__()", "def int_format(self):\n ...", "def __float__(self) -> float:\n return float(self.p)", "def __int__(self): \n return int(self.val())", "def __float__(self):\n return self.num/self.denom", "def __float__(self):\n return self.num/self.denom", "def distance_to_int(mi):\n return round(change_precision(mi, 2))", "def __float__(self):\n return self.micros() / 1000000.0", "def getInteger(self):\n pass", "def _quantize(self) :\n self.A[self.A <= self.thr] = -1\n self.A[self.A > self.thr] = 1\n self.A = self.A.astype(\"int\")", "def _to_int(self, num):\n assert isinstance(num, Number), 'Is not number in _to_int'\n return floor(self.__tick_to_unit_time * num)", "def get_int2(self):\n pass", "def decimals(self) -> int:\n return self._parseChunkToInt(self.chunks[7], 1, 1, True)", "def _irep_to_value(self,n,i):\n if i == 1:\n j,k = divmod(n,9)\n v = (k+1)*10**j\n return v\n else:\n j,k = divmod(n,int(10.0/i))\n if k == 0:\n v = 10**j\n else:\n v = i*k*10**j\n return v", "def get_decimal(self):\n return self.decimal", "def __repr__(self):\n return \"<%d,%d>%f\" % (self.int_bits,self.frac_bits,self.decode(self.encoded))", "def to_inch(self):\r\n if self.units != 'inch':\r\n self.units = 'inch'\r\n for statement in self.statements:\r\n statement.to_inch()\r\n for tool in iter(self.tools.values()):\r\n tool.to_inch()\r\n for primitive in self.primitives:\r\n primitive.to_inch()\r\n for hit in self.hits:\r\n hit.to_inch()", "def normalize(self):\n return (1. 
/ abs(self)) * self", "def __int__(self):\n return int(self.obj)", "def intify(x):\n return int(x) if almost_equal(x, round(x)) else x", "def numerize():\n pass", "def get_precision(self):\n ...", "def base2int(self, float_number):\r\n return int(round(float_number * self.mult_base))", "def numerator(self):\n return +self", "def num (self):\n return self.value[0]/self.value[1]", "def disc(x):\n return int(round(x))", "def rounds(self):\n if self.round_number > 0:\n for i in range(self.round_number):\n yield Round(i + 1)", "def setValue(self, value):\n super().setValue(int(value * self._precision))", "def number(self):\n number = self._interval_class._number\n direction = mathtools.sign(number)\n number = abs(number) + (12 * self.octaves)\n return number * direction", "def floatize(self):\n self.x = float(self.x)\n self.y = float(self.y)", "def floatize(self):\n self.x = float(self.x)\n self.y = float(self.y)", "def __float__(self):\n return self.num / self.denom # result of / is of type float", "def mixed(self):\n numer = self.numer_prod()\n denom = self.denom_prod()\n self.numer = RationalFrac.factorize(numer % denom)\n return numer // denom", "def _float2int(x: float) -> int:\n return round(x * 100)", "def __round__(self, *args, **kwargs): # real signature unknown\n pass", "def __round__(self, *args, **kwargs): # real signature unknown\n pass", "def __round__(self, *args, **kwargs): # real signature unknown\n pass", "def __round__(self, *args, **kwargs): # real signature unknown\n pass", "def __round__(self, *args, **kwargs): # real signature unknown\n pass", "def __round__(self, *args, **kwargs): # real signature unknown\n pass", "def __round__(self, *args, **kwargs): # real signature unknown\n pass", "def __round__(self, *args, **kwargs): # real signature unknown\n pass", "def __round__(self, *args, **kwargs): # real signature unknown\n pass", "def __round__(self, *args, **kwargs): # real signature unknown\n pass" ]
[ "0.70756865", "0.6518239", "0.65143543", "0.64591146", "0.62661624", "0.62452054", "0.6234085", "0.6169121", "0.6146257", "0.6132525", "0.60545045", "0.6035522", "0.6035522", "0.6019279", "0.59916437", "0.59584105", "0.5947643", "0.5937057", "0.59202385", "0.5870171", "0.5870171", "0.5870171", "0.5870171", "0.5870171", "0.5870171", "0.5870171", "0.5870171", "0.5870171", "0.5870171", "0.5870171", "0.5870171", "0.5870171", "0.5870171", "0.5870171", "0.5870171", "0.5866849", "0.58428097", "0.5802088", "0.5786771", "0.578178", "0.57619596", "0.57619596", "0.5745752", "0.5739561", "0.57368594", "0.57368594", "0.5717922", "0.56906736", "0.5677214", "0.5654567", "0.5639208", "0.5607246", "0.5598341", "0.5545919", "0.5543327", "0.5537492", "0.5493601", "0.5493487", "0.5492729", "0.54805803", "0.54776216", "0.54776216", "0.54650056", "0.54458857", "0.54346484", "0.54209286", "0.5419732", "0.54126686", "0.53948754", "0.5389014", "0.5370463", "0.53511685", "0.534688", "0.53458214", "0.5345138", "0.5344577", "0.53271407", "0.5324344", "0.5314745", "0.53145653", "0.5307588", "0.5295013", "0.52852523", "0.52848464", "0.52838284", "0.5278593", "0.5278593", "0.52741605", "0.5260907", "0.5252567", "0.52522236", "0.52522236", "0.52522236", "0.52522236", "0.52522236", "0.52522236", "0.52522236", "0.52522236", "0.52522236", "0.52522236" ]
0.59119105
19
Converts array with appropriate shapes to reduced (uncentered) space by applying log scaling and sigma scaling
def _to_reduced_space(self, value: np.ndarray) -> np.ndarray:
    sigma = self.sigma.value
    if self.bound_transform is not None:
        value = self.bound_transform.backward(value)
    distribval = value if self.exponent is None else np.log(value) / np.log(self.exponent)
    reduced = distribval / sigma
    return reduced.ravel()  # type: ignore
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rescale_data(self):\n\n # Dividing every array of simulated data vectors by the mean of that array.\n '''# Didnt work\n for key in self.data.keys():\n self.data[key] /= np.mean(self.data[key])\n '''\n\n self.rescaled = True\n\n # Mean normalization\n \"\"\" didnt work\n for key in self.data.keys():\n self.data[key] -= np.mean(self.data[key])\n self.data[key] /= (np.max(self.data[key]) - np.min(self.data[key]))\n \"\"\"\n\n # Median normalization\n \"\"\" didnt work, still dividing by large number \n for key in self.data.keys():\n self.data[key] -= np.median(self.data[key])\n self.data[key] /= (np.max(self.data[key]) - np.min(self.data[key]))\n \"\"\"\n\n # Divide by median\n \"\"\" didnt work\n for key in self.data.keys():\n self.data[key] -= np.median(self.data[key])\n self.data[key] /= (np.median(self.data[key]))\n \"\"\"\n\n # Take logarithm of data\n \"\"\" didnt work\n for key in self.data.keys():\n self.data[key] = np.log10(self.data[key])\n \"\"\"\n\n # Scale by length of vector\n \"\"\"\n for key in self.data.keys():\n self.data[key] /= np.linalg.norm(self.Cl_noiseless)\n \"\"\"\n\n \n # Scale by negative of the natural logarithm \n for key in self.data.keys():\n self.data[key] = -1 * np.log(self.data[key]) \n \n \"\"\"\n # Scale by subtracting the mean and dividing by std\n std = np.nanstd(self.data['data'])\n mean = np.nanmean(self.data['data'])\n for key in self.data.keys():\n # self.data[key] -= np.log(self.Cl_noiseless) # -1* # scale this same way\n # self.data[key] -= self.Cl_noiseless # -1* # scale this same way\n self.data[key] -= mean \n self.data[key] /= std\n \"\"\"", "def normalize_log_likelihoods(X):\n h, w = np.shape(X)\n return X - np.tile(logsumexp(X, axis=0), (h, 1))\n # return X - np.matlib.repmat(logsumexp(X, axis=0), h, 1)", "def scale(x: np.ndarray) -> tuple[FloatArray, BoolArray]:\n logx = np.log2(x + 1)\n mask_1d = ~np.isclose(np.nanstd(logx, axis=0), 0.0)\n scaled_x = standardize(logx[:, mask_1d], axis=0)\n scaled_x[np.isnan(scaled_x)] = 0\n return scaled_x, mask_1d", "def log_normalize(a, axis=None):\n if axis is not None and a.shape[axis] == 1:\n # Handle single-state GMMHMM in the degenerate case normalizing a\n # single -inf to zero.\n a[:] = 0\n else:\n with np.errstate(under=\"ignore\"):\n a_lse = logsumexp(a, axis, keepdims=True)\n a -= a_lse", "def softmax_ndarray(logits: jnp.DeviceArray) -> jnp.DeviceArray:\n assert len(logits.shape) == 2\n # Normalise for better stability.\n s = jnp.max(logits, axis=1, keepdims=True)\n e_x = jnp.exp(logits - s)\n return e_x / jnp.sum(e_x, axis=1, keepdims=True)", "def log_normalize(log_prob, axis):\n log_sum = logsumexp(log_prob, axis=axis)\n \n if not isinstance(log_sum, np.ndarray):\n log_sum = np.array([log_sum])\n if log_prob.shape[0] == log_sum.shape[0]:\n # column normalize \n return (log_prob.transpose() - log_sum).transpose()\n else:\n # row normalize\n return log_prob - log_sum", "def scalenans(X) :\n\tXscale = (X - np.nanmean(X)) / np.nanstd(X)\n\treturn Xscale", "def normalise(self,data,take_logs:bool=False):\n\n # Normalise vector to sum up to 1\n normalised_vector = data/np.sum(data)\n\n # If take logs is selected, take logs\n if take_logs:\n return np.log(normalised_vector)\n else:\n return normalised_vector", "def transform(self, original_input):\n return super(LogarithmicDimension, self).transform(np.log(original_input))", "def scale(X, *, axis=..., with_mean=..., with_std=..., copy=...):\n ...", "def rescale(self):\n # Get the L1 norm of data and scale correction for each fiber\n data_dims = 
self.data_dims\n if data_dims is ():\n tens_scale = self.data.abs()\n else:\n tens_scale = torch.sum(self.data.abs(), dim=data_dims, keepdim=True)\n log_shift = torch.floor(TARGET_SCALE(self.shape, data_dims) - \n torch.log2(tens_scale))\n\n # Keep the scale for zero fibers unchanged\n if torch.any(torch.isinf(log_shift)):\n log_shift = torch.where(torch.isfinite(log_shift), log_shift,\n torch.zeros_like(log_shift))\n\n return STensor(self.data*(2**log_shift), \n self.scale-log_shift)", "def rescale_toa(arr, dtype=np.float32):\n # First look at raw value dists along bands\n\n arr_trans = np.subtract(arr, arr.min(axis=(1, 2))[:, np.newaxis, np.newaxis])\n arr_rs = np.divide(arr_trans, arr_trans.max(axis=(1, 2))[:, np.newaxis, np.newaxis])\n if dtype == np.uint8:\n arr_rs = np.array(arr_rs*255, dtype=np.uint8)\n return arr_rs", "def preprocess(data,scale):\n ##log_transformation\n #data['log_sale_price'] = np.log(data['sale_price'])\n #data['log_lot_area'] = np.log(data['lot_area'])\n #data['house_age'] = data['year_sold']- data['year_built']\n \n y = data['stay']\n \n #sales['log_sale_price'] = np.log(sales['sale_price'])\n #sales['log_lot_area'] = np.log(sales['lot_area'])\n #sales['house_age'] = sales['year_sold']- sales['year_built']\n data_dummy = data.copy()\n \n #dummy coding\n data_scale = pd.get_dummies(data_dummy).drop(columns = ['stay'])\n\n \n #scale the value\n if scale == True:\n S = StandardScaler().fit(data_scale)\n data_scale = S.transform(data_scale)\n \n return y, data_scale", "def max_normalization(array):\n return 1/np.max(array) * array.squeeze(axis=1)", "def _scale(x, axis=None):\n x = _remove_baseline(x, axis=axis)\n x /= np.std(x, ddof=1, axis=axis, keepdims=True)\n return x", "def __convert_to_log(self):\n for i in range(self.nStates):\n if self.pi[i]>0:\n self.pi[i]=log(self.pi[i])\n else:\n self.pi[i]=float('-inf')\n for j in range(self.nStates):\n if self.t[i][j]>0:\n self.t[i][j]=log(self.t[i][j])\n else:\n self.t[i][j]=float('-inf')\n for j in range(self.nObs):\n if self.e[i][j]>0:\n self.e[i][j]=log(self.e[i][j])\n else:\n self.e[i][j]=float('-inf')\n self.logdomain=True", "def scale_input(array):\n # scaled_input = (input-mean)/standard deviation\n scale_sepal_length = (array[0] - 5.84) / 0.83\n scale_sepal_width = (array[1] - 3.05) / 0.43\n scale_petal_length = (array[2] - 3.76) / 1.76\n scale_petal_width = (array[3] - 1.20) / 0.76\n return [\n scale_sepal_length,\n scale_sepal_width,\n scale_petal_length,\n scale_petal_width,\n ]", "def transform(self, data):\n #scaled_transform = data + self.sc_factor * (data* (1-self.sigma) - self.mu) / self.sigma\n # scaling = 1+self.sc_factor*(self.sigma-1)\n # scaling = tf.clip_by_value(scaling, 1.0e-8, 1.0e8)\n\n scaled_transform = (data-self.mu)/(tf.maximum(tf.sqrt(self.var)*self.sc_factor, 1e-2))\n return scaled_transform", "def normalize(self,arr):\n arr = arr/(arr.max()/255.0)\n return arr", "def scale_data(x):\n mu = x.mean(axis=0)\n sigma = x.std(axis=0)\n x = (x - mu) / sigma\n return (x, mu, sigma)", "def transform(self, sess, xs):\n return sess.run( [self.z_mean, self.z_log_sigma_sq],\n feed_dict={self.x: xs} )", "def _normalize(images):\n images -= images.mean(axis=0, keepdims=True)\n images /= np.maximum(images.std(axis=0, keepdims=True), 3e-1)", "def _normalize(images):\n images -= images.mean(axis=0, keepdims=True)\n images /= np.maximum(images.std(axis=0, keepdims=True), 3e-1)", "def normalize_features(array):\n \n array_normalized = (array-array.mean())/array.std()\n mu = array.mean()\n sigma = 
array.std()\n\n return array_normalized, mu, sigma", "def standardize(X):\n mu = X.mean(axis=0, keepdims=True)\n s = X.std(axis=0, keepdims=True)\n return (X-mu)/s", "def simple_scaling(input_data):\n\n # Insert debugging assertions\n assert type(input_data) is np.ndarray, \"The 'input_data' must be numpy array.\"\n\n # Get the minimum values of the input numpy array along the axis \n Max = np.max(input_data, axis = 0)\n\n # Simple sclaing \n scaled_input_data = input_data / (Max + sys.float_info.min)\n\n # Return scaled input data\n return scaled_input_data", "def _process(self, data: np.array) -> np.array:\n # pylint: disable=no-member\n return unp.sqrt(data[..., 0] ** 2 + data[..., 1] ** 2) * self.scale", "def _process(self, data: np.ndarray) -> np.ndarray:\n return data[..., 0] * self.scale", "def normalize(array):\n\treturn array/np.max(array)", "def normalize(arr: np.ndarray) -> np.ndarray:\n if max(arr) - min(arr) == 0:\n logger.warning(\n \"Normalize averted a div/0, the input data was:\\n {0}\".format(arr)\n )\n return np.ones(len(arr))\n return (arr - min(arr)) / (max(arr) - min(arr))", "def transform_data(data, cols, standardize = True, log_trans = True):\r\n data = data[cols]\r\n if log_trans:\r\n y = np.log(data)\r\n y[y == -np.inf] = 0\r\n y[y == np.inf] = 0\r\n data = (y- data.mean(axis = 0)) / data.std(axis = 0)\r\n\r\n if standardize:\r\n data = (data- data.mean(axis = 0)) / data.std(axis = 0)\r\n\r\n return data", "def _process(self, data: np.ndarray) -> np.ndarray:\n return data[..., 1] * self.scale", "def safe_log(arr, threshold=d.eps):\n arr = np.asarray(arr)\n # if type(arr) == np.ndarray:\n arr[arr < threshold] = threshold\n # else:\n # arr = max(threshold, arr)\n logged = np.log(arr)\n return logged", "def _scale_array(arr, clip=True):\n if clip:\n scaled = np.clip(arr, 0, 255)\n else:\n scale_range = (max([arr.min(), 0]), min([arr.max(), 255]))\n scaled = _min_max_scale(arr, new_range=scale_range)\n\n return scaled", "def robust_scale(X, *, axis=..., with_centering=..., with_scaling=..., quantile_range=..., copy=..., unit_variance=...):\n ...", "def _fspecial_gauss_1d(self, size, sigma):\n coords = torch.arange(size).to(dtype=torch.float)\n coords -= size // 2\n g = torch.exp(-(coords ** 2) / (2 * sigma ** 2))\n g /= g.sum()\n return g.reshape(-1)", "def log_filter(stack, sigma):\n stack_cp = stack.astype(np.int16)\n gauss = ndi.filters.gaussian_filter(stack_cp, sigma=sigma)\n log = ndi.filters.laplace(gauss)\n return log", "def normalize(X):\n\tX = X - np.mean(X,axis=1)[:,np.newaxis]\n\tX = X/np.std(X,axis=0)[np.newaxis,:];\n\tX = X - np.mean(X,axis=0)[np.newaxis,:]\n\treturn X", "def normalize(arr):\n m = np.min(arr)\n arr = arr - m\n M = np.max(arr)\n arr = arr / M\n return arr", "def normalize(data):\n norm_matrix = np.int_(np.log10(data)**2)\n norm_matrix = map(lambda x: x if x < BOARD_SIZE else BOARD_SIZE, norm_matrix)\n norm_matrix = map(lambda x: x if x > 0 else 0, norm_matrix)\n return norm_matrix", "def rescale(data_matrix):\n means, stdevs = scale(data_matrix)\n\n def rescaled(i, j):\n if stdevs[j] > 0:\n return (data_matrix[i][j] - means[j]) / stdevs[j]\n else:\n return data_matrix[i][j]\n\n num_rows, num_cols = shape(data_matrix)\n return make_matrix(num_rows, num_cols, rescaled)", "def centrageReduction(data):\n X = data.values\n std_scale = preprocessing.StandardScaler().fit(X)\n x_scaled = std_scale.transform(X)\n return x_scaled", "def normalize(self, arr):\r\n\r\n\t\t#Set the cap for arr at self.value_max and self.value_max\r\n\t\t#this prevents 
outliers of breaking the previously predicted p_func\r\n\t\tarr_capped = arr * (arr <= self.value_max) + self.value_max * (arr > self.value_max)\t#cap to value_max\r\n\t\tarr_capped = arr_capped * (arr_capped >= self.value_min) + self.value_min * (arr_capped < self.value_min)\t#cap to value_min\r\n\r\n\t\t#Normalize array\r\n\t\tnorm_factor = self.get_norm_factor(arr_capped)\r\n\t\tnormalized = arr * norm_factor\r\n\r\n\t\treturn(normalized)", "def log_softmax(x: jnp.DeviceArray, *, axis: int = 0) -> jnp.DeviceArray:\n return x - jnp.expand_dims(jnp.log(jnp.sum(jnp.exp(x), axis=axis)), axis)", "def lognormalize(x, temp = 1):\n if type(x) is list: x = np.array(x)\n\n x = x - np.max(x)\n # anneal\n xp = np.power(np.exp(x), temp)\n return xp / xp.sum()", "def sigma_R(field, scale):\n field_filtered = filter_Field(field, tophat_kernel, (scale,))\n return field_filtered.t.std()", "def scale(inp: np.ndarray, new_min: float = 0., new_max: float = 1.,\n axis: int = -1) -> np.ndarray:\n xmax = inp.max(axis=axis, keepdims=True)\n xmin = inp.min(axis=axis, keepdims=True)\n a = (inp-xmin) / (xmax - xmin)\n y = a * (new_max - new_min) + new_min\n return y", "def log_normalise(data, vmin, vmax):\n result = np.ma.masked_less_equal(data, 0, copy=False)\n if vmin > vmax:\n raise ValueError(\"minvalue must be less than or equal to maxvalue\")\n elif vmin <= 0:\n raise ValueError(\"values must all be positive\")\n elif vmin == vmax:\n result.fill(0)\n else:\n mask = np.ma.getmask(result)\n result = np.ma.array(np.clip(result.filled(vmax), vmin, vmax), mask=mask)\n\n resdat = result.data\n mask = result.mask\n if mask is np.ma.nomask:\n mask = (resdat <= 0)\n else:\n mask |= (resdat <= 0)\n\n np.log(resdat, resdat)\n resdat -= np.log(vmin)\n resdat /= (np.log(vmax) - np.log(vmin))\n result = np.ma.array(resdat, mask=mask, copy=False)\n\n return result", "def scale(arrayin,Amin,Amax,mask=None):\r\n if (mask==None) and (arrayin.max() - arrayin.min())!=0.0 :\r\n Bmax = arrayin.max()\r\n Bmin = arrayin.min()\r\n elif (arrayin.max() - arrayin.min())!=0.0 :\r\n ny = arrayin.shape[0]\r\n nx = arrayin.shape[1]\r\n Bmax = arrayin.min()\r\n Bmin = arrayin.max()\r\n for i in range(ny):\r\n for j in range(ny):\r\n if mask[i,j] > 0.5e0 :\r\n if arrayin[i,j] < Bmin :\r\n Bmin = arrayin[i,j]\r\n if arrayin[i,j] > Bmax :\r\n Bmax = arrayin[i,j]\r\n else :\r\n print \"andrew.bagOfns.scale : warning (arrayin.max() - arrayin.min())=0.0 \"\r\n return np.copy(arrayin)\r\n\r\n arrayout = (arrayin - Bmin)*(Amax - Amin) / (Bmax - Bmin) + Amin\r\n return arrayout", "def standardize(x, axis=-1):\n stds_avg = np.std(x, axis=axis, keepdims=True)\n x -= np.mean(x, axis=axis, keepdims=True)\n x /= (stds_avg + 1e-8)\n return x", "def scale_and_ignore_zeros(data,target):\n # make sure inputs are numpy arrays\n \n if not isinstance(data,np.ndarray):\n data=np.array(data)\n if not isinstance(target,np.ndarray):\n target=np.array(target)\n \n if min(data) == 0:\n zero_indx_data = np.argwhere(data==0)\n data_t = np.delete(data,zero_indx_data)\n min_data = min(data_t)\n else:\n min_data = min(data)\n zero_indx_data = []\n \n if min(target) == 0:\n zero_indx_target = np.argwhere(target==0)\n target_t = np.delete(target,zero_indx_target)\n min_target = min(target_t)\n else:\n min_target = min(target)\n \n data = data - min_data\n data = data/max(data)\n\n rng = max(target) - min_target\n \n scaled_data = data*rng + min_target\n\n scaled_data[zero_indx_data] = 0\n \n return scaled_data", "def standardize(X):\n\n scaler = StandardScaler()\n X_scaled 
= scaler.fit_transform(X)\n return X_scaled", "def normalisation_l_inf(x):\n res = np.zeros(x.shape)\n for i in range(x.shape[0]):\n for j in range(x.shape[1]):\n res[i,j] = x[i,j]/(np.max(x[i,j])+1e-5)\n return(res)", "def log_scale_ds9(im, lexp=1.e12, cmap=[7.97917, 0.8780493], scale=[-0.1,10]):\n import numpy as np\n \n contrast, bias = cmap\n clip = (np.clip(im, scale[0], scale[1])-scale[0])/(scale[1]-scale[0])\n clip_log = np.clip((np.log10(lexp*clip+1)/np.log10(lexp)-bias)*contrast+0.5, 0, 1)\n \n return clip_log", "def _normalize(a: np.ndarray, u: float=0, s: float=1) -> np.ndarray:\n a_norm = (a - np.mean(a)) / (np.std(a) + STABILITY)\n a_rescaled = a_norm * s + u\n\n return a_rescaled", "def _normalize(weights, axis, log=True):\n if log:\n normalizer = tf.reduce_logsumexp(weights, axis=axis, keepdims=True)\n return weights - normalizer\n normalizer = tf.reduce_sum(weights, axis=axis)\n return weights / normalizer", "def normalise(array,tot=1.0):\r\n tot1 = np.sum(np.abs(array)**2)\r\n if tot1 == 0.0 :\r\n print 'bg.normalise : warning sum array = 0'\r\n arrayout = np.copy(array)\r\n else :\r\n arrayout = array * np.sqrt(tot / tot1)\r\n return arrayout", "def clean_log_arrays(self, logs):\n concat_log = (np.concatenate(logs)).T\n log_dict = {'a': concat_log[0], 'b': concat_log[1], 'dt': concat_log[2], 'm': concat_log[3]}\n log_df = pd.DataFrame.from_dict(log_dict) # make into DF again for cleaning\n # Tiny Performances time defined to be in [0,5.0], thus set limits\n log_df.set_value(log_df[log_df.dt > 5].index, 'dt', 5.0)\n log_df.set_value(log_df[log_df.dt < 0].index, 'dt', 0.0)\n # Tiny Performance bounds defined to be in [[0,1],[0,1]], edit to fix this.\n log_df.set_value(log_df[log_df.a > 1].index, 'a', 1.0)\n log_df.set_value(log_df[log_df.a < 0].index, 'a', 0.0)\n log_df.set_value(log_df[log_df.b > 1].index, 'b', 1.0)\n log_df.set_value(log_df[log_df.b < 0].index, 'b', 0.0)\n if self.verbose:\n # Check values:\n print(\"\\ndescriptions of log values:\")\n print(\"\\nall logs:\")\n print(log_df.describe())\n print(\"\\ndescription of taps:\")\n # As a rule of thumb, could classify taps with dt>0.1 as taps, dt<0.1 as moving touches.\n print(log_df[log_df.m == 0].describe())\n print(\"\\ndescription of moving touches:\")\n print(log_df[log_df.m == 1].describe())\n return np.array(log_df[['a', 'b', 'dt']])", "def transform(self, X, y='deprecated', copy=None): \n\n check_is_fitted(self, 'scale_')\n\n copy = copy if copy is not None else self.copy\n\n #X = check_array(X, copy=copy, warn_on_dtype=True,\n # estimator=self, dtype=FLOAT_DTYPES,\n # force_all_finite='allow-nan')\n\n if self.with_mean:\n X -= self.mean_\n if self.with_std:\n X /= self.scale_\n return X", "def Log_Scaling(Posterior,q,InvV,m_points):\n\n m = InvV.n #get the number of maps being used \n d = InvV.d\n\n g_est = np.zeros(m_points.num)\n \n for j in range(m):\n #backmap the points from the posterior to the intermediate\n backmap = m_points.map(InvV,j)\n #determine the current mixture using a change of variables\n det = InvV.L[j,:,:].diagonal().prod()**2\n g_est += q[j] * multivariate_normal.pdf(backmap.all,mean=np.zeros(d),cov=np.eye(d)) * det\n \n #now we have the total mixture\n s = np.log(Posterior) - np.log(g_est)\n\n \n r = s - np.mean(s)\n misfit = 0.5 * np.linalg.norm(r)\n return s, r, misfit", "def normalize(X, mu, sigma):\n return (X - mu) / sigma", "def normalize(data):\n data = numpy.asmatrix(data)\n std_devs = numpy.std(data, axis=1)\n std_devs[std_devs == 0] = 1 # prevent div by 0\n return (data - 
numpy.mean(data, axis=1)) / std_devs", "def transform(a):\n return np.array([np.mean(a), np.std(a)])", "def standardize_back(xs, offset, scale):\n try:\n offset = float(offset)\n except:\n raise ValueError('The argument offset is not None or float.')\n try:\n scale = float(scale)\n except:\n raise ValueError('The argument scale is not None or float.')\n try:\n xs = np.array(xs, dtype=\"float64\")\n except:\n raise ValueError('The argument xs is not numpy array or similar.')\n return xs*scale + offset", "def logit_transform(params, bounds):\n with np.errstate(divide=\"ignore\", invalid=\"ignore\"):\n ret_array = np.ma.array(\n [np.ma.log(np.true_divide((x - a), (b - x))) for x, (a, b) in zip(params, bounds)])\n ret_array.set_fill_value(0)\n return np.ma.filled(ret_array)", "def loglam_minmax(self):\n return np.log10(8000.0), np.log10(25700)", "def normalize(traces):\n\n start = time()\n\n avg_intens = np.mean(traces, axis=1)\n n_traces = traces.shape[0]\n n_frames = traces.shape[1]\n\n # zero-center the traces\n centered_cy5 = np.zeros((n_traces, n_frames))\n for i in range(n_traces):\n centered_cy5[i, :] = traces[i, :] - avg_intens[i]\n\n scaled_data_cy5 = np.zeros((n_traces, n_frames))\n\n for i in range(n_traces):\n cy5_trc = centered_cy5[i, :]\n cy5_min = cy5_trc.min()\n cy5_max = cy5_trc.max()\n if cy5_min == cy5_max:\n scaled_data_cy5[i] = np.ones(cy5_trc.shape) \n else:\n scaled_data_cy5[i] = (cy5_trc - cy5_min) / (cy5_max - cy5_min)\n\n print(\"Time passed: \" + str(time() - start))\n\n return scaled_data_cy5", "def clamp_sigma(self, sigma, sigma_min=.01):\n self.logsigma.data.clamp_(np.log(sigma_min), np.log(sigma))", "def scaling():\n \n for i in range(cfg.nfea):\n dm = 0\n var = 0\n for j in range(cfg.ntrain):\n dm += cfg.a[j,i]\n dm = dm/cfg.ntrain\n \n for j in range(cfg.ntrain):\n var += (cfg.a[j,i]-dm)**2\n\n var = var/cfg.ntrain\n var = np.sqrt(var)\n \n if var >= 10**(-5):\n cfg.clin[i] = 1.0/var \n cfg.dlin[i] = -dm/var \n \n else: \n if np.abs(dm)<=1.0:\n cfg.clin[i] = 1.0\n cfg.dlin[i] = 0.0 \n else: \n cfg.clin[i] = 1.0/dm\n cfg.dlin[i] = 0.0 \n \n for j in range(cfg.ntrain):\n cfg.a_scaled[j,i] = cfg.clin[i]*cfg.a[j,i] + cfg.dlin[i]\n \n return", "def eeg_rms(array, axis=0):\t\t\n\treturn np.sqrt(np.mean(array ** 2,axis))", "def calculate_stds(array):\n\n\trgb = np.moveaxis(array, 3, 0)\n\trgb = np.reshape(rgb, (3,-1))\n\n\tlms = rgb2lms(rgb)\n\tdel rgb\n\tc = np.log(10)\n\tlog_lms = np.log(lms)/c\n\tdel lms\n\tlab = lms2lab(log_lms)\n\tdel log_lms\n\n\treturn np.std(lab, axis=1)", "def fit_input_array(input_array, base_matrix):\n flattened_size = numpy.prod(input_array.shape)\n return numpy.int_(numpy.ceil(numpy.log10(flattened_size)/numpy.log10(base_matrix.shape[0])))", "def scale(matrix, s):\n x, y = matrix.shape\n print(matrix.shape)\n\n print(np.ones(matrix.shape))\n # b = np.ones(x, y)\n # print(\"ones:\", b)\n return np.kron(a, np.ones((x, y)))", "def log_normalized_den(self, X):\n raise NotImplementedError()", "def normalize(data):\n\n\t#return [float(x) / pow(2, 15) for x in data]\n\n\tl = [float(x) / pow(2, 15) for x in data]\n\treturn np.asarray(l)", "def log_scale(self, value: float):\n assert value > 1\n self.__log_scale = value\n self.logarithmic = self.logarithmic", "def scale(input):\n return (input - np.min(input)) / ((np.max(input) - np.min(input)))", "def clr_trans(data):\n if isinstance(data, pd.DataFrame):\n data = data.to_numpy()\n data[data == 0] = 1/len(data[0,:])**2 # replace zeros with small constant\n geometric_mean = 
mstats.gmean(data,axis=1)\n clr_data = np.log(data / geometric_mean[:,None])\n return clr_data", "def log_transform(x, epsilon = 1e-4):\n if x.min() < 0: epsilon += np.abs(x.min())\n return (x.fillna(0).astype(float) + epsilon).apply(np.log)", "def logGauss(self):\n #firstly initialise an array to store the values\n log = np.zeros([self.num])\n \n #now want to loop through each of the points in the collections\n for i in range(self.num):\n #get the point as an array\n point = self.pick(i)\n #key characteristic of standard normal: can treat as product of independent 1D normals\n log[i] = self.d - np.log(np.sqrt(2 * np.pi)) - 0.5 * np.sum(point**2)\n return log", "def _transform(self, X: Tensor) -> Tensor:\n X_new = X.clone()\n X_new[..., self.indices] = X_new[..., self.indices].log10()\n return X_new", "def _normalize(X: np.ndarray) -> np.ndarray:\n # return X * np.sqrt(1 / np.sum(X ** 2, axis=1))[:, None]\n return X * np.sqrt(X.shape[1] / np.sum(X ** 2, axis=1))[:, None]", "def standardize(X, axis=0, ddof=0):\n\n # Modified from scikit-learn.preprocessing.scale()!\n\n #X = np.asarray(X)\n X = np.asarray(X, dtype=np.float) # XXX: what about dtype? convert to float64? for higher precision? let client decide?\n Xr = np.rollaxis(X, axis) # view on X to enable broadcasting on the axis we are interested in\n \n mean_ = Xr.mean(axis=0)\n std_ = Xr.std(axis=0, ddof=ddof)\n std_[std_ == 0.0] = 1.0 # avoid NaNs due to div/zero\n\n # center mean on zero\n Xr -= mean_\n\n # Verify that mean_1 is 'close to zero'. If X contains very\n # large values, mean_1 can also be very large, due to a lack of\n # precision of mean_. In this case, a pre-scaling of the\n # concerned feature is efficient, for instance by its mean or\n # maximum.\n mean_1 = Xr.mean(axis=0)\n if not np.allclose(mean_1, 0.0):\n warnings.warn(\"Numerical issues were encountered \"\n \"when centering the data \"\n \"and might not be solved. Dataset may \"\n \"contain too large values. You may need \"\n \"to prescale your features.\")\n Xr -= mean_1\n mean_ += mean_1\n\n # scale to unit variance\n Xr /= std_\n\n # If mean_2 is not 'close to zero', it comes from the fact that\n # std_ is very small so that mean_2 = mean_1/std_ > 0, even if\n # mean_1 was close to zero. The problem is thus essentially due\n # to the lack of precision of mean_. A solution is then to\n # substract the mean again.\n mean_2 = Xr.mean(axis=0)\n if not np.allclose(mean_2, 0.0):\n warnings.warn(\"Numerical issues were encountered \"\n \"when scaling the data \"\n \"and might not be solved. The standard \"\n \"deviation of the data is probably \"\n \"very close to 0.\")\n Xr -= mean_2\n mean_ += mean_2\n\n # Additional check if variances are 'close to one'\n std_1 = Xr.std(axis=0, ddof=ddof)\n if not np.allclose(std_1, 1.0):\n warnings.warn(\"Numerical issues were encountered \"\n \"when scaling the data \"\n \"and might not be solved. 
Standard deviation \"\n \"not close to one after scaling.\")\n\n return X, mean_, std_", "def normalize(array):\n high = array.max()\n low = array.min()\n rng = high - low\n array[:] = 1.0 - ((high - array) / rng)", "def scale(self, data: np.ndarray):\n if self.scale_type == \"min_max\":\n scaled_data = (data - self.predictor_min) / (\n self.predictor_max - self.predictor_mean\n )\n elif self.scale_type == \"normalize\":\n scaled_data = (data - self.predictor_mean) / (\n self.predictor_max - self.predictor_min\n )\n elif self.scale_type == \"standardize\":\n scaled_data = (data - self.predictor_mean) / self.predictor_std\n elif self.scale_type == \"scale\":\n scaled_data = data - self.predictor_mean\n else:\n scaled_data = data\n return scaled_data", "def _normalize_(x: np.array) -> np.array:\n if x.max() != 0:\n x = x / x.max()\n return np.clip(x, 0, 1)# ensure that no values are >1\n else:\n raise ZeroDivisionError('Image Normalization')", "def convert_logsumexp(g, op, block):\n\n input_x = g.get_node(op.input(\"X\")[0])\n axis = op.attr(\"axis\")\n if op.attr(\"reduce_all\"):\n axis = None\n keepdims = op.attr(\"keepdim\")\n out = get_relay_op(\"logsumexp\")(input_x, axis=axis, keepdims=keepdims)\n if not axis and not keepdims:\n out = _op.expand_dims(out, axis=0)\n g.add_node(op.output(\"Out\")[0], out)", "def normalise(x):\n return (x - jnp.min(x)) / (jnp.max(x) - jnp.min(x))", "def _min_max_scale(arr, new_range=(0, 255)):\n # get array's current min and max\n mn = arr.min()\n mx = arr.max()\n\n # check if scaling needs to be done to be in new_range\n if mn < new_range[0] or mx > new_range[1]:\n # perform min-max scaling\n scaled = (new_range[1] - new_range[0]) * (arr - mn) / (mx - mn) + new_range[0]\n else:\n # return array if already in range\n scaled = arr\n\n return scaled", "def rescale_arr(arr, amin, amax):\r\n\r\n # old bounds\r\n m = arr.min()\r\n M = arr.max()\r\n # scale/offset\r\n s = float(amax - amin) / (M - m)\r\n d = amin - s * m\r\n\r\n # Apply clip before returning to cut off possible overflows outside the\r\n # intended range due to roundoff error, so that we can absolutely guarantee\r\n # that on output, there are no values > amax or < amin.\r\n return np.clip(s * arr + d, amin, amax)", "def unscale_data(self, data):\n return (data + self.mean)*self.std", "def _normal_log_prob(self, r, scale_log):\n return -(r**2) / 2 - scale_log - self.const", "def standardise(self):\n if self.vector.shape is ():\n return\n if self.dimensionality() != 1:\n # TODO: implement\n raise NotImplementedError\n max_value = 1.0 * max(self.vector)\n if max_value == 0.0:\n # Nothing to do\n return\n self.vector = self.vector.astype('float64') / max_value", "def global_scaling(imgs_arr, axis0_values=None, target=1500):\n if axis0_values is None:\n axis0_values = imgs_arr.mean(axis=1)\n gscaling = target / np.asarray(axis0_values)\n gscaling = gscaling.reshape([gscaling.shape[0]] + [1] * (imgs_arr.ndim - 1))\n return gscaling * imgs_arr", "def fill_matrix(X: np.ndarray, mixture: GaussianMixture) -> np.ndarray:\n shaped_X = X.reshape((X.shape[0],1,X.shape[1])).repeat(mixture.mu.shape[0],axis=1)\n shaped_mu = mixture.mu.reshape((1,mixture.mu.shape[0],mixture.mu.shape[1])).repeat(X.shape[0],axis=0)\n shaped_var = mixture.var.reshape((1,mixture.var.shape[0],1)).repeat(X.shape[0],axis=0)\n shaped_p = mixture.p.reshape((1,mixture.var.shape[0],1)).repeat(X.shape[0],axis=0)\n\n shaped_var_extended = shaped_var.repeat(X.shape[1],axis=2)\n log_N_X = 
-1/2*np.log(2*np.pi*shaped_var_extended)-(shaped_X-shaped_mu)**2*np.reciprocal(2*shaped_var_extended)\n log_N_X_clean = np.where(shaped_X == 0, shaped_X, log_N_X).sum(axis=2,keepdims=True)\n f = np.log(shaped_p) + log_N_X_clean.sum(axis=2,keepdims=True)\n lse = logsumexp(f,axis=1,keepdims=True).repeat(f.shape[1],axis=1)\n log_post = f-lse\n\n post = np.exp(log_post.reshape((log_post.shape[0],log_post.shape[1])))\n\n n, d = X.shape\n K = mixture.mu.shape[0]\n\n nKd_post = post.reshape(n,K,1).repeat(d,axis=2)\n nKd_mu = mixture.mu.reshape((1,K,d)).repeat(n,axis=0)\n\n predictions = (nKd_mu * nKd_post).sum(axis=1)\n\n return np.where(X == 0, predictions, X)", "def standardize_ts(a, scale=1.0):\n stds = np.std(a, axis=0, keepdims=True)\n stds[stds==0] = 1\n return (a - np.mean(a, axis=0, keepdims=True))/(scale*stds)", "def normalize(arr):\n arr = arr.astype('float')\n # Do not touch the alpha channel\n for i in range(1):\n minval = arr[...,i].min()\n maxval = arr[...,i].max()\n if minval != maxval:\n arr[...,i] -= minval\n arr[...,i] *= (255.0/(maxval-minval))\n return arr", "def transform(self, X):\n X = np.asarray(X, dtype=np.float64)\n X *= self.scale_\n X += self.min_\n return X", "def softmax(arr: np.ndarray, axis: int = -1):\n c = arr.max(axis=axis, keepdims=True)\n s = arr - c\n nominator = np.exp(s)\n denominator = nominator.sum(axis=axis, keepdims=True)\n probs = nominator / denominator\n return probs", "def inverse_transform(self, X, copy=None):\n \n check_is_fitted(self, 'scale_')\n\n copy = copy if copy is not None else self.copy\n\n #X = check_array(X, copy=copy, warn_on_dtype=True,\n # estimator=self, dtype=FLOAT_DTYPES,\n # force_all_finite='allow-nan')\n\n if self.with_mean:\n X += self.mean_\n if self.with_std:\n X *= self.scale_\n return X" ]
[ "0.6439206", "0.6292463", "0.6131833", "0.60969806", "0.60392785", "0.5985598", "0.59760886", "0.59423095", "0.58070093", "0.58033174", "0.5675887", "0.5661247", "0.5655357", "0.5631111", "0.5602358", "0.5597288", "0.5594214", "0.55887544", "0.5582954", "0.55819786", "0.5576093", "0.5569888", "0.5569888", "0.556019", "0.5538468", "0.55213237", "0.5504592", "0.54720014", "0.5456726", "0.5451463", "0.5445431", "0.5440177", "0.5439067", "0.5431354", "0.5430954", "0.54214", "0.5414595", "0.5399789", "0.5392183", "0.5379761", "0.5376889", "0.5372136", "0.5367464", "0.53648067", "0.53600615", "0.53580374", "0.53551936", "0.5342361", "0.53396916", "0.53326553", "0.53290904", "0.53233343", "0.5314534", "0.52997196", "0.52964634", "0.528367", "0.528195", "0.5271062", "0.5263306", "0.5258676", "0.5257441", "0.5250149", "0.52451587", "0.52409655", "0.5230181", "0.52294225", "0.5223077", "0.52203727", "0.5219532", "0.5212131", "0.520805", "0.5205704", "0.5205249", "0.5204591", "0.520222", "0.52021104", "0.51867944", "0.51730317", "0.5167892", "0.5160636", "0.5155683", "0.5154264", "0.51510924", "0.51455414", "0.5143996", "0.51436895", "0.51356226", "0.51340616", "0.5132525", "0.5130079", "0.5125807", "0.51227033", "0.51206845", "0.51196563", "0.5113168", "0.51040095", "0.5103229", "0.5101251", "0.5099654", "0.50920206" ]
0.6378868
1
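For illustration, a minimal self-contained sketch of the log-scaling plus sigma-scaling transform retrieved in the preceding record; the class name and the sigma/exponent values are hypothetical stand-ins for the instance state the original method reads, and the bound-transform step is omitted:

import numpy as np

class ReducedSpaceSketch:
    # Hypothetical stand-ins for the retrieved method's instance attributes.
    def __init__(self, sigma: float = 2.0, exponent: float = 10.0) -> None:
        self.sigma = sigma        # scale of the distribution in reduced space
        self.exponent = exponent  # base of the log scaling (None would disable it)

    def to_reduced_space(self, value: np.ndarray) -> np.ndarray:
        # log scaling: express each value as an exponent of the chosen base
        distribval = np.log(value) / np.log(self.exponent)
        # sigma scaling: divide by the distribution scale, then flatten
        return (distribval / self.sigma).ravel()

# usage: log10(100) = 2.0, divided by sigma = 2.0, gives 1.0
print(ReducedSpaceSketch().to_reduced_space(np.array([100.0])))  # -> [1.]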
This method is used to set the deposit return and deposit received boolean fields according to the current Tenancy.
def _compute_payment_type(self):
    res = super(AccountAnalyticAccount, self)._compute_payment_type()
    if self._context.get('is_landlord_rent'):
        for tennancy in self:
            for payment in self.env['account.payment'].search(
                    [('tenancy_id', '=', tennancy.id), ('state', '=', 'posted')]):
                if payment.payment_type == 'outbound':
                    tennancy.deposit_received = True
                if payment.payment_type == 'inbound':
                    tennancy.deposit_return = True
    return res
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def landlord_button_deposite_received(self):\n payment_id = False\n acc_pay_form = self.env.ref(\n 'account.view_account_payment_form')\n account_jrnl_obj = self.env['account.journal'].search(\n [('type', '=', 'sale')], limit=1)\n payment_obj = self.env['account.payment']\n payment_method_id = self.env.ref(\n 'account.account_payment_method_manual_in')\n for tenancy_rec in self:\n if tenancy_rec.acc_pay_dep_rec_id and \\\n tenancy_rec.acc_pay_dep_rec_id.id:\n return {\n 'view_type': 'form',\n 'view_id': acc_pay_form.id,\n 'view_mode': 'form',\n 'res_model': 'account.payment',\n 'res_id': tenancy_rec.acc_pay_dep_rec_id.id,\n 'type': 'ir.actions.act_window',\n 'target': 'current',\n 'context': self._context,\n }\n if tenancy_rec.deposit == 0.00:\n raise Warning(_('Please Enter Deposit amount.'))\n if tenancy_rec.deposit < 0.00:\n raise Warning(\n _('The deposit amount must be strictly positive.'))\n vals = {\n 'partner_id': tenancy_rec.property_owner_id.parent_id.id,\n 'partner_type': 'customer',\n 'journal_id': account_jrnl_obj.id,\n 'payment_type': 'inbound',\n 'communication': 'Deposit Received',\n 'tenancy_id': tenancy_rec.id,\n 'amount': tenancy_rec.deposit,\n 'property_id': tenancy_rec.property_id.id,\n 'payment_method_id': payment_method_id.id\n }\n payment_id = payment_obj.create(vals)\n return {\n 'view_mode': 'form',\n 'view_id': acc_pay_form.id,\n 'view_type': 'form',\n 'res_id': payment_id and payment_id.id,\n 'res_model': 'account.payment',\n 'type': 'ir.actions.act_window',\n 'nodestroy': True,\n 'target': 'current',\n 'domain': '[]',\n 'context': {\n 'close_after_process': True,\n }\n }", "def landlord_button_deposite_pay(self):\n payment_id = False\n acc_pay_form = self.env.ref(\n 'account.view_account_payment_form')\n account_jrnl_obj = self.env['account.journal'].search(\n [('type', '=', 'purchase')], limit=1)\n payment_obj = self.env['account.payment']\n payment_method_id = self.env.ref(\n 'account.account_payment_method_manual_in')\n for tenancy_rec in self:\n if tenancy_rec.acc_pay_dep_rec_id and \\\n tenancy_rec.acc_pay_dep_rec_id.id:\n return {\n 'view_type': 'form',\n 'view_id': acc_pay_form.id,\n 'view_mode': 'form',\n 'res_model': 'account.payment',\n 'res_id': tenancy_rec.acc_pay_dep_rec_id.id,\n 'type': 'ir.actions.act_window',\n 'target': 'current',\n 'context': self._context,\n }\n if tenancy_rec.deposit == 0.00:\n raise Warning(_('Please Enter Deposit amount.'))\n if tenancy_rec.deposit < 0.00:\n raise Warning(\n _('The deposit amount must be strictly positive.'))\n vals = {\n 'partner_id': tenancy_rec.property_owner_id.parent_id.id,\n 'partner_type': 'supplier',\n 'journal_id': account_jrnl_obj.id,\n 'payment_type': 'outbound',\n 'communication': 'Deposit Received',\n 'tenancy_id': tenancy_rec.id,\n 'amount': tenancy_rec.deposit,\n 'property_id': tenancy_rec.property_id.id,\n 'payment_method_id': payment_method_id.id\n }\n payment_id = payment_obj.create(vals)\n return {\n 'view_mode': 'form',\n 'view_id': acc_pay_form.id,\n 'view_type': 'form',\n 'res_id': payment_id and payment_id.id,\n 'res_model': 'account.payment',\n 'type': 'ir.actions.act_window',\n 'nodestroy': True,\n 'target': 'current',\n 'domain': '[]',\n 'context': {\n 'close_after_process': True,\n }\n }", "def deposit(amt) :\r\n\tglobal bal\r\n\tbal_in = bal\r\n\t#PREMISES FOR NEXT LINE: \r\n\t# (amt >= 0)\r\n\t# (bal >= 0)\r\n\t# (bal == bal_in)\r\n\tbal = bal + amt\r\n\t#PREMISES FOR ATTACHED PROOF, IF ANY: \r\n\t# (bal == (bal_old + amt))\r\n\t# (amt >= 0)\r\n\t# (bal_old >= 0)\r\n\t# 
(bal_old == bal_in)\r\n\t#PREMISES FOR NEXT LINE: \r\n\t# (amt >= 0)\r", "def accept_transfer(self):\n self.is_accepted = True\n self.date_time_accepted = models.DateTimeField(auto_now=True)", "def deposit(self, cr, uid, ids, amount, context=None):\n record = self.browse(cr, uid, ids, context=context)[0]\n current_amount = record.current_amount\n deposit_amount = record.deposit_amount\n record.write({'current_amount':current_amount + amount,\n 'deposit_amount':deposit_amount + amount })\n return True", "def deposit(self, amount, another_user=None):\n if another_user:\n another_user.deposit(amount)\n self.register_operation(self.ACTIONS['RECEIVING'], amount)\n self.register_operation(self.ACTIONS['TRANSFERING'], amount, another_user)\n else:\n self.__balance = float(Decimal(str(self.__balance + amount)))\n self.register_operation(self.ACTIONS['RECEIVING'], amount)\n\n return True # False is never reached", "def save(self, *args, **kwargs):\n wallet = self.wallet.withdraw(self.value)\n super(Payment, self).save(*args, **kwargs)", "def deposit(self, deposit_money):\r\n self.balance += deposit_money", "def awaiting_payment(self):", "def set(self, boolean):\n self._val = boolean", "def deposit(self, amount):\n self.dep = amount\n self.balance += self.dep", "def deposit(self, account_id: int, amount: float) -> Dict[bool, Accounts]:\n raise Exception(\"Method should be implemented\")", "def __bool__(self):\n return self.balance > 0", "def set_bool_value(self, event):\n\n self.undo_add()\n\n key_list = list(self.patch.engine.misc_data.keys())\n key = key_list[self.selected_index]\n data = self.patch.engine.misc_data[key]\n\n if self.ValueEnabled.GetValue():\n self.patch.misc[key] = data['on']\n else:\n self.patch.misc[key] = data['off']\n\n self.is_modified(True)\n self.misclist_update_row(self.selected_index)", "def test_yes_option_disabled(\n self, wait_tx_settled_mock, confirm_mock, do_transfer_mock\n ):\n password_option = self.get_password_args(self.PASSWORD)\n self.invoke(\n \"transfer\",\n self.LEDGER_ID,\n self.get_address(self.LEDGER_ID, self.PASSWORD),\n \"100000\",\n \"100\",\n *password_option,\n )\n confirm_mock.assert_called_once()", "def on_update(self):\n if self.get('update_request') and not self.is_pending_approval():\n if self.is_revert:\n self.set_as_reverted()\n else:\n self.set_as_success()", "def confirmed(self):", "def test_yes_option_enabled(\n self, wait_tx_settled_mock, confirm_mock, do_transfer_mock\n ):\n password_option = self.get_password_args(self.PASSWORD)\n self.invoke(\n \"transfer\",\n self.LEDGER_ID,\n self.get_address(self.LEDGER_ID, self.PASSWORD),\n \"100000\",\n \"100\",\n \"-y\",\n *password_option,\n )\n confirm_mock.assert_not_called()", "def returnbank(self):\n now = datetime.now()\n if self.__out and self.__signed_in and not self.__returned:\n self.__returned = True\n self.__out = False\n self.__out_info[\"Returned\"] = self.__returned\n self.__out_info[\"Returned_Time\"] = now\n self.__out_log[\"{}/{}/{} {}:{}\".format(now.month, now.day, now.year, now.hour, now.minute)] = self.__out_info", "def enable(self, request):\n if request.data:\n if self.setpoint_valid:\n self.enabled = True\n self.setpoint_valid = True\n return [True,\"DP Controller Enabled\"]\n else:\n rospy.logwarn(\"No valid setpoint received yet.\")\n return [False,\"No valid setpoint received yet.\"]\n else:\n self.enabled = False\n return [False,\"DP Controller Disabled\"]", "def decline(self):\n self.is_active = False\n self.save()", "def decline(self):\n self.is_active = False\n 
self.save()", "def set_treasury(self) -> None:\n if self.msg.value < 10 ** 22:\n revert(f\"{TAG}: set_treasury method doesnt accept ICX less than 10000 ICX\")\n if self.msg.value % 10 ** 22 != 0:\n revert(f\"{TAG}: Set treasury error, Please send amount in multiples of 10,000 ICX\")\n self._treasury_min.set(self._treasury_min.get() + self.msg.value)\n Logger.debug(f'Increasing treasury minimum by {self.msg.value} to {self._treasury_min.get()}.')\n self._set_bet_limit()\n self._open_treasury.set(False)\n self.FundReceived(self.msg.sender, self.msg.value, f\"Treasury minimum increased by {self.msg.value}\")\n Logger.debug(f'{self.msg.value} was added to the treasury from address {self.msg.sender}', TAG)", "def transferfunds(self):", "def deposit(self, amount):\r\n new_balance = self['get']('balance') + amount\r\n self['set']('balance', new_balance)\r\n return self['get']('balance')", "async def legsubmit(self, ctx):\n\n new_value = await self.toggle_dm_setting(ctx.author.id, \"leg_session_submit\")\n\n if new_value:\n message = f\":white_check_mark: You will now receive DMs when you are a member of the \" \\\n f\"{self.bot.mk.LEGISLATURE_CABINET_NAME} \" \\\n f\"and someone submits a Bill or Motion. \" \\\n f\"Note that you will never get a DM when a member of the \" \\\n f\"{self.bot.mk.LEGISLATURE_CABINET_NAME} is the one submitting.\"\n else:\n message = f\":white_check_mark: You will no longer receive DMs when you are a member of the \" \\\n f\"{self.bot.mk.LEGISLATURE_CABINET_NAME} and someone submits a Bill or Motion.\"\n\n await ctx.send(message)", "def paid(self, paid):\n\n self._paid = paid", "def call_transfer_fund(self):\n ## 1) Create expense line for current student\n ## 2) Create Deposite lines for oney transfer student\n\n ## 1\n student_pool = self.env['op.student']\n partner_obj = self.env['res.partner']\n employee_pool = self.env['hr.employee']\n\n if not self.pin_varification:\n raise except_orm(_('Warning!'),\n _(\"Enter Valid PIN to proceed!\"))\n\n\n student_id = student_pool.search([('user_id', '=', self._uid)])\n\n ## Validate Enter PIN\n if student_id:\n self.validate_current_user_pin(student_id)\n\n expense_vals = {\n 'name': student_id.id,\n 'amount': self.amount_transfer,\n 'date': datetime.datetime.now(),\n 'source': \"Transfer Amount of %s to account no %s (%s) on date %s - %s\" % (self.amount_transfer, self.account_no, self.name, datetime.datetime.now(), self.description),\n 'create_invoice': False,\n # 'student_id': student_id.id,\n }\n\n student_expenses_id = self.env['student.expenses'].sudo().create(expense_vals)\n self.total_expense_balance = student_id.stud_balance_amount\n\n ## Get employee form account id\n employee_id = employee_pool.sudo().search([('ean13', '=', self.account_no)])\n\n ## Search EMployee By Employee ID\n search_by_id_employee_id = employee_pool.sudo().search([('identification_id', '=', self.account_no)])\n\n ## Search by student matrix ID\n search_by_id_student_id = student_pool.sudo().search([('gr_no', '=', self.account_no)])\n\n if not self.account_no:\n ## Logic for search by User Name\n employee_id = self.pass_employee_id.sudo()\n student_id = self.pass_student_id.sudo()\n else:\n ## Get partner form account id\n student_id = student_pool.sudo().search([('ean13', '=', self.account_no)])\n if student_id:\n deposite_vals = {\n 'name': student_id.id,\n # 'amount': self.amount_to_transfer,\n 'paid_amount': self.amount_transfer,\n 'date': datetime.datetime.now(),\n 'create_invoice': True,\n }\n student_deposite_id = 
self.env['student.deposits'].sudo().create(deposite_vals)\n if not self.account_no:\n trans_student_id = student_id.sudo()\n else:\n trans_student_id = student_pool.sudo().search([('ean13', '=', self.account_no)])\n if trans_student_id:\n self.total_deposite_balance = trans_student_id.stud_balance_amount\n elif employee_id:\n deposite_vals = {\n 'name': employee_id.id,\n 'employee_id': employee_id.identification_id,\n 'paid_amount': self.amount_transfer,\n 'date': datetime.datetime.now(),\n 'create_invoice': True,\n 'source': \"Transfer Amount of %s to account no %s (%s) on date %s - %s \" % (self.amount_transfer, self.account_no, self.name, datetime.datetime.now(), self.description),\n }\n employee_deposite_id = self.env['employee.deposits'].sudo().create(deposite_vals)\n self.total_deposite_balance = employee_id.available_balance\n\n elif search_by_id_employee_id:\n deposite_vals = {\n 'name': search_by_id_employee_id.id,\n 'employee_id': search_by_id_employee_id.identification_id,\n 'paid_amount': self.amount_transfer,\n 'date': datetime.datetime.now(),\n 'create_invoice': True,\n 'source': \"Transfer Amount of %s to account no %s (%s) on date %s - %s \" % (self.amount_transfer, self.account_no, self.name, datetime.datetime.now(), self.description),\n }\n employee_deposite_id = self.env['employee.deposits'].sudo().create(deposite_vals)\n self.total_deposite_balance = search_by_id_employee_id.available_balance\n\n elif search_by_id_student_id:\n deposite_vals = {\n 'name': search_by_id_student_id.id,\n 'employee_id': search_by_id_student_id.gr_no,\n 'paid_amount': self.amount_transfer,\n 'date': datetime.datetime.now(),\n 'create_invoice': True,\n 'source': \"Transfer Amount of %s to account no %s (%s) on date %s - %s \" % (self.amount_transfer, self.account_no, self.name, datetime.datetime.now(), self.description),\n }\n student_deposite_id = self.env['student.deposits'].sudo().create(deposite_vals)\n self.total_deposite_balance = search_by_id_student_id.stud_balance_amount\n\n # return True\n compose_form = self.env.ref('deposite_management.transfer_confirmation_popup_view', False)\n\n try:\n template_id = self.env.ref('deposite_management.email_template_student_fund_transfer', False)\n except ValueError:\n template_id = False\n values = self.env['email.template'].generate_email(template_id.id, self.id)\n\n ## Append Student email id to send mail\n if values and 'email_to' in values:\n values['email_to'] = student_id.sudo().email\n mail_id = self.env['mail.mail'].sudo().create(values)\n if mail_id:\n mail_send_id = mail_id.send()\n\n try:\n template_id_new = self.env.ref('deposite_management.email_template_student_fund_transfer_self_notification', False)\n except ValueError:\n template_id_new = False\n values_new = self.env['email.template'].generate_email(template_id_new.id, self.id)\n ## Append email id to send mail\n if values_new and 'email_to' in values_new:\n if student_id and trans_student_id:\n values_new['email_to'] = trans_student_id.email\n elif employee_id:\n values_new['email_to'] = employee_id.sudo().work_email\n mail_id_new = self.env['mail.mail'].sudo().create(values_new)\n if mail_id_new:\n mail_send_id = mail_id_new.send()\n ## return wizard after click on Fund Transfer Button\n return {\n 'name': _('Fund Transfer Done'),\n 'type': 'ir.actions.act_window',\n 'view_type': 'form',\n 'view_mode': 'form',\n 'res_model': 'fund.confirmation.msg',\n 'view_id': compose_form.id,\n 'target': 'new',\n }", "def _apply_decision(self, decision, responder):\n self.responder = 
responder\n self.status = decision\n self.decision_datetime = timezone.now()\n self.save()", "def deposit(self, amount):\n self.balance = self.balance + amount\n return self.balance", "def set_tank_valve(is_open):\n global tank_valve_open\n\n tank_valve_open = is_open\n\n # Upload a datapoint with the new position.\n Thread(target=upload_configurations_drm, args=([(STAT_VALVE, int(is_open))],), daemon=True).start()\n\n print_log(\"Tank valve is now {}\".format(\"open\" if tank_valve_open else \"closed\"))\n return AT_VALUE_ENABLED if is_open else AT_VALUE_DISABLED", "def on_update_after_submit(self):\n if self.get('update_request') and not self.is_pending_approval():\n if self.is_revert:\n self.set_as_reverted()\n else:\n self.set_as_success()", "def withdraw(self, amount, description=''):\n state = True\n if not(self.check_funds(amount)):\n state = False\n else:\n self.ledger.append({'amount': -amount, 'description': description})\n return state", "def register_deposit(self, deposit_intent):\n if deposit_intent > 0:\n self.teo.register_deposit(self, deposit_intent)", "def fake_opgepakt(self, value: bool) -> None:\n self._fake_opgepakt = value", "def testDebitTender(self):\n self.setupTransaction()\n if not checkout.pay_card(card_name='Debit'):\n tc_fail(\"Failed to pay with debit tender\")\n # This is an attempt to prevent PHYK-85 from happening\n self.setupTransaction()\n if not checkout.pay_card(card_name='Debit'):\n tc_fail(\"Failed to pay with debit tender\")\n self.handleMessages()", "def toggle_dropable(self,new_bool):\n self.dropablee = new_bool", "def deposit(self, amount):\n self.balance += amount", "def deposit(self, amount):\n self.balance += amount", "def withdrawn(self, withdrawn):\n\n self._withdrawn = withdrawn", "def deposit(self, amount):\n\n print(\"\\nDeposit - {self.name}\".format(self=self))\n\n # checks for negative amount value\n if amount < 0:\n print(\"Cannot deposit £{0:.2f}\".format(amount))\n print(\"Deposit amount cannot be a negative value.\")\n \n # adds amount to account balance\n else:\n self.balance += amount\n print(\"{0} has deposited £{1:.2f}. New balance is £{2:.2f}\".format(self.name, amount, self.balance))", "def deposit(self, amount) -> None:\n self._balance += amount\n return None", "def deposit(account, amount):\n pass", "def prepayment_deposited(self):\n return 'prepayment_deposited' if self.is_fully_paid() else 'awaiting_payment'", "def validate(self,admin,bal_org,bal_dst):\n\n rv=admin.helper.setAmount(admin.userName,self.org,bal_org)\n if rv!= None:\n rv=admin.helper.setAmount(admin.userName,self.dst,bal_dst)\n if rv != None:\n return True\n else:\n return False", "def set_flag(self, new):\n self.flag = new", "def deposit(self, amount):\n self.balance += amount\n self.transactions.append((\"Deposit\", amount))\n print \"Your new balance is $%d.\" % self.balance", "def dr_approve(self):\n print \"DR approved this form. 
Current state:\", self.state", "def boolean(self, boolean):\n\n self._boolean = boolean", "def on_Deposit_clicked(self):\n # TODO: not implemented yet\n raise NotImplementedError", "def deposit(self, amount):\n message = self.account.deposit(float(amount))\n if message:\n return message\n else:\n self.myView.displayAccount()\n return \"success\"", "def approve(self):\n self.approved = True\n self.quest_node['approved'] = True\n graph.push(self.quest_node)\n self.payout()", "def toggle_pick_upable(self,new_bool):\n self.pick_upable = new_bool", "def makebank(self, amount):\n if self.__returned and not self.__made:\n self.__amount = amount\n self.__made = True", "def transfer(self, amount, budget_cat):\n state = True\n if not (self.check_funds(amount)):\n state = False\n else:\n self.withdraw(amount, 'Transfer to '+str(budget_cat.name))\n budget_cat.deposit(amount, 'Transfer from '+str(self.name))\n return state", "def PerformSTP(self, settlement):\n additional_info_field = 'Call_Confirmation'\n additional_info_value = 'SBLManualRelease'\n settlement.AddInfoValue(additional_info_field, additional_info_value)\n LOGGER.info(\"Auto-setting Call_Confirmation for Settlement with id {settlement}\".format(settlement=settlement.Oid()))\n settlement.Commit()", "def switch(self):\n if self.set == self.funds_set:\n self.set = self.portfolios_set\n else:\n self.set = self.funds_set", "def _onchange_field(self):\n if not self.secretary_contact_id:\n return\n if self.partner_type in ['dr', 'patient', 'secretary']:\n self.update({\n 'secretary_contact_id': False\n })", "def deposit(self, amount):\n self.balance += amount\n return self.balance", "def deposit(self, amount):\n self.balance += amount\n return self.balance", "def deposit(self, amount):\n self.balance += amount\n return self.balance", "def on_VDepositValue_editingFinished(self):\n # TODO: not implemented yet\n # raise NotImplementedError\n value = self.VDepositValue.text()\n if JudgeStr2Float(value):\n self.depositValue = float(value)\n print(f\"Validator Deposit with {value} MAN\")\n else:\n self.vdepositValue = 0\n self.VDepositValue.clear()\n # self.VDepositValue.setFocus()", "def toggle(self, *_):\r\n \r\n global ac\r\n if self.author_f_ent.var.get():\r\n self.add_a['state'] = 'normal'\r\n elif self.author_m_ent.var.get():\r\n self.add_a['state'] = 'normal'\r\n elif self.author_l_ent.var.get():\r\n self.add_a['state'] = 'normal'\r\n else:\r\n self.add_a['state'] = 'disabled'", "def deposit(self, amount):\n self.__balance += amount\n return self.__balance", "def setGatingOverride(self, channel, isEnabled, unitCode=0):\n resp = self.XAPCommand('GOVER', channel, (1 if isEnabled else 0), unitCode=unitCode)\n return bool(int(resp))", "def set_export_touchstone(self, activate, export_dir=\"\"):\n settings = []\n if activate:\n settings.append(\"NAME:options\")\n settings.append(\"ExportAfterSolve:=\")\n settings.append(True)\n settings.append(\"ExportDir:=\")\n settings.append(export_dir)\n elif not activate:\n settings.append(\"NAME:options\")\n settings.append(\"ExportAfterSolve:=\")\n settings.append(False)\n self.odesign.DesignOptions(settings, 0)\n return True", "def set_bribe(self, bribe_amount):\r\n self.bribe = bribe_amount", "def deposit(self, account_number: int, deposit: float): \n self._accounts[account_number][1] += deposit", "async def legwithdraw(self, ctx):\n\n new_value = await self.toggle_dm_setting(ctx.author.id, \"leg_session_withdraw\")\n\n if new_value:\n message = f\":white_check_mark: You will now receive DMs when you are a 
member of the \" \\\n f\"{self.bot.mk.LEGISLATURE_CABINET_NAME} and someone withdraws their Bill or Motion. \" \\\n f\"Note that you will never get a DM when a member of the \" \\\n f\"{self.bot.mk.LEGISLATURE_CABINET_NAME} is the one withdrawing.\"\n\n else:\n message = f\":white_check_mark: You will no longer receive DMs when you are a member of the \" \\\n f\"{self.bot.mk.LEGISLATURE_CABINET_NAME} and someone withdraws their Bill or Motion.\"\n\n await ctx.send(message)", "def update_amounts(self, save=True):\n self.amount_donated = self.get_amount_total(\n [StatusDefinition.SUCCESS, StatusDefinition.PENDING,\n StatusDefinition.PLEDGED])\n self.amount_needed = self.amount_asked - self.amount_donated\n\n if self.amount_needed < 0:\n # Should never be less than zero\n self.amount_needed = 0\n\n if save:\n self.save()", "def commitPrivateValueSet(self, data):\n\n self._log(\"commit-private-value-set\").debug4(\"%s: commit data - %s\", self.name, data)\n\n self.runningEnabled = self.candidateEnabled \n self.runningTechMode = self.candidateTechMode \n self.mibIfIndex = data.mibIfIndex\n self.shouldSendGratuitousArp = data.sendGratuitousArp\n self.deliveryStabilityDelay = data.configurationDelay \n\n # mute reporting was changed\n if self.muteReporting != data.muteReporting:\n self.muteReporting = data.muteReporting\n\n if self.connectivityCheck:\n self.connectivityCheck.setMuteReporting(self.muteReporting)\n\n self._log(\"interface-mute-reporting\").notice(\"interface %s mute reporting was changed to %s\", self.name, data.muteReporting)\n\n return ReturnCodes.kOk", "def set_as_reverted(self):\n self.is_revert = False\n self.update_request.status = 'Reverted'", "def save(self, *args, **kwargs):\n if not self.pk:\n self.start_time_rent = datetime.date.today()\n self.end_time_rent = self.start_time_rent + datetime.timedelta(days=7)\n self.reservation.isrented = True\n self.reservation.save()\n return super(Rental, self).save(*args, **kwargs)", "def setTrue(self):\n self.cond = CT.TRUE\n self.left = self.right = None\n self.z3 = BoolSort().cast(True)\n self.cleaned = self.Z3Simplified = self.customSimplified = self.checked = True\n self.customSimplifiedValue = CE.TRUE", "def decide(self) :\n (self.futurX,self.futurY) = self.randomNextPos()\n if self.fishBreedTimeCPT == 0 :\n self.naissance = True\n self.fishBreedTimeCPT = self.fishBreedTime\n else :\n self.fishBreedTimeCPT = self.fishBreedTimeCPT - 1\n\n if self.env.grille[self.futurY][self.futurX] == None :\n self.bougera = True\n else :\n self.bougera = False\n\n self.update()", "def settle_self(self):\n self.state = 'completed'\n self.save()\n self.safe_post()", "def updateParameters(self, parameters):\n if parameters[1].value == True:\n parameters[2].enabled = True;\n else:\n parameters[2].enabled = False; \n \n if parameters[3].value == True:\n parameters[4].enabled = True;\n \n else:\n parameters[4].enabled = False; \n return", "def action_set_done(self):\n self.ensure_one()\n self.write({\"state\": \"done\"})\n self.credit_control_line_ids.write({\"state\": \"done\"})\n return True", "def setDegreesFlag(newValue):\n global DegreesFlag\n DegreesFlag = newValue", "def deposit(cls, amount):\n if amount >= 0 and cls.is_logged_in():\n cls.__current_acct.__transaction(amount)\n else:\n print('deposit error')", "def return_change(self):\n\n if self.amount > 0:\n custom_log(f\"Change to return : €{self.amount}\", MSG_DEBUG)\n else:\n custom_log(\"No change to return.\", MSG_DEBUG)", "def true_damage_dealt(self, true_damage_dealt):\n\n 
self._true_damage_dealt = true_damage_dealt", "def set_answer(self) -> None:\n if self.check_ans:\n self.check_ans = False\n else:\n self.check_ans = True", "def reset_treatment(self, params):\n self.ART = False\n self.PREP = False \n self._Disease = params['Disease']", "def get_bool2(self):\n pass", "def controlled(self):\n if self.crypt_private is not None and self.sign_private is not None:\n return True\n else:\n return False", "def set_option_post_form(self, boolean, apikey=''):\n return six.next(six.itervalues(self.zap._request(self.zap.base + 'spider/action/setOptionPostForm/', {'Boolean': boolean, 'apikey': apikey})))", "def action_anular(self):\n self.write({'state': 'draft'})\n return True", "def deposit_money_check(amt):\r\n global balance_money\r\n print(\"Deposit money is : \", amt)\r\n balance_money = balance_money + amt", "def is_returned(self):\n if self.isreturned is True and self.return_helper is False:\n self.reservation.cars.quantity += 1\n self.return_helper = True\n self.save()\n self.reservation.cars.save()\n return self.isreturned\n else:\n return self.isreturned", "def set_boolean(x):\n\n if x:\n return \"True\"\n else:\n return \"False\"", "def on_MDepositValue_editingFinished(self):\n # TODO: not implemented yet\n # raise NotImplementedError\n value = self.MDepositValue.text()\n if JudgeStr2Float(value):\n self.mdepositValue = float(value)\n print(f\"Validator Deposit with {value} MAN\")\n else:\n self.mdepositValue = 0\n self.MDepositValue.clear()\n # self.MDepositValue.setFocus()", "def set_is_watering(valve: Valve, value: bool) -> None:\n valve.is_watering = value", "def withdraw(self, responder):\n self._apply_decision(self.Status.WITHDRAWN, responder)", "def PerformSTP(self, settlement):\n LOGGER.info(\"Loan Settlements are not yet Settled, Auto Holding Return Settlement\")\n LOGGER.info(\"Auto Holding Settlement with id {settlement}\".format(settlement=settlement))\n OperationsSTPFunctions.hold_settlement(settlement)", "def deposit(self, amount):\n self.transactions += [('deposit', amount)]\n self.balance = self.balance + amount\n return self.balance", "def autoExposureChk(self, state):\n if state == Qt.Checked and self.kinect.kinectConnected == True:\n self.kinect.toggleExposure(True)\n else:\n self.kinect.toggleExposure(False)", "def deposit(self, amount=None):\n if amount is None:\n amount = random() * 1000\n acct_info = {\"account_num\": choice(TRANSACTION_ACCT_LIST),\n \"routing_num\":\"111111111\"}\n transaction = {\"account\": json.dumps(acct_info),\n \"amount\": amount,\n \"uuid\": generate_username()}\n with self.client.post(\"/deposit\",\n data=transaction,\n catch_response=True) as response:\n if response.url is None or \"failed\" in response.url:\n response.failure(\"deposit failed\")", "def _setForBinding (self, value):\n if not isinstance(value, bool):\n raise TypeError(value)\n self.__forBinding = value\n return value", "def safeWithdrawal(self):\n if self._after_dead_line():\n # each contributor can withdraw the amount they contributed if the goal was not reached\n if not self._funding_goal_reached.get():\n amount = self._balances[self.msg.sender]\n self._balances[self.msg.sender] = 0\n if amount > 0:\n if self.icx.send(self.msg.sender, amount):\n self.FundTransfer(self.msg.sender, amount, False)\n Logger.debug(f'FundTransfer({self.msg.sender}, {amount}, False)', TAG)\n else:\n self._balances[self.msg.sender] = amount\n\n # The sales target has been met. 
Owner can withdraw the contribution.\n if self._funding_goal_reached.get() and self._addr_beneficiary.get() == self.msg.sender:\n if self.icx.send(self._addr_beneficiary.get(), self._amount_raised.get()):\n self.FundTransfer(self._addr_beneficiary.get(), self._amount_raised.get(), False)\n Logger.debug(f'FundTransfer({self._addr_beneficiary.get()},'\n f'{self._amount_raised.get()}, False)', TAG)\n # reset amount_raised\n self._amount_raised.set(0)\n else:\n # if the transfer to beneficiary fails, unlock contributors balance\n Logger.debug(f'Failed to send to beneficiary!', TAG)\n self._funding_goal_reached.set(False)" ]
[ "0.5757384", "0.5666821", "0.563632", "0.5451328", "0.5437696", "0.54078245", "0.539652", "0.53773105", "0.5311555", "0.5225933", "0.52221566", "0.5212389", "0.5212031", "0.5210332", "0.5194012", "0.5192315", "0.51765287", "0.5168921", "0.51624215", "0.5126235", "0.51125246", "0.51125246", "0.509263", "0.50891376", "0.5088664", "0.5087274", "0.50863683", "0.50747067", "0.5070884", "0.5064492", "0.5052933", "0.5037505", "0.5024747", "0.5020144", "0.501903", "0.50184023", "0.50150156", "0.50087565", "0.50087565", "0.5006905", "0.5000495", "0.4994662", "0.4983263", "0.49779513", "0.49736977", "0.49693438", "0.4968007", "0.49600324", "0.49500164", "0.4941365", "0.49408555", "0.49225986", "0.4916805", "0.49159172", "0.49035314", "0.48866734", "0.48847583", "0.4883281", "0.48823425", "0.48823425", "0.48823425", "0.4880237", "0.48656258", "0.4862008", "0.48571706", "0.48531598", "0.48469013", "0.48432487", "0.48410776", "0.48373896", "0.4836182", "0.4832843", "0.48206732", "0.48126945", "0.48082653", "0.48060602", "0.48049912", "0.4798342", "0.47892928", "0.478902", "0.47860274", "0.47842562", "0.4782682", "0.47822386", "0.47807878", "0.47796646", "0.47655967", "0.47650903", "0.47588736", "0.475724", "0.47557744", "0.4749521", "0.47472316", "0.47451824", "0.4744291", "0.4744129", "0.47355565", "0.472786", "0.47272757", "0.4727132" ]
0.55960554
3
This button method is used to create rent schedule lines.
def create_rent_schedule_landlord(self):
    """Create rent schedule lines for the landlord over the tenancy period."""
    rent_obj = self.env['tenancy.rent.schedule']
    for tenancy_rec in self:
        amount = tenancy_rec.landlord_rent
        if tenancy_rec.rent_type_id.renttype == 'Weekly':
            d1 = tenancy_rec.date_start
            d2 = tenancy_rec.date
            interval = int(tenancy_rec.rent_type_id.name)
            if d2 < d1:
                raise Warning(
                    _('End date must be greater than start date.'))
            wek_diff = (d2 - d1)
            # integer division so range() below receives an int
            wek_tot1 = (wek_diff.days) // (interval * 7)
            wek_tot = (wek_diff.days) % (interval * 7)
            if wek_diff.days == 0:
                wek_tot = 1
            if wek_tot1 > 0:
                # one schedule line per full rent interval
                for wek_rec in range(wek_tot1):
                    rent_obj.create({
                        'start_date': d1,
                        'amount': amount * interval or 0.0,
                        'property_id': tenancy_rec.property_id and
                        tenancy_rec.property_id.id or False,
                        'tenancy_id': tenancy_rec.id,
                        'currency_id': tenancy_rec.currency_id.id or False,
                        'rel_tenant_id': tenancy_rec.tenant_id.id
                    })
                    d1 = d1 + relativedelta(days=(7 * interval))
            if wek_tot > 0:
                # prorate the remaining days at a per-day rate
                one_day_rent = 0.0
                if amount:
                    one_day_rent = (amount) / (7 * interval)
                rent_obj.create({
                    'start_date': d1.strftime(DEFAULT_SERVER_DATE_FORMAT),
                    'amount': (one_day_rent * (wek_tot)) or 0.0,
                    'property_id': tenancy_rec.property_id and
                    tenancy_rec.property_id.id or False,
                    'tenancy_id': tenancy_rec.id,
                    'currency_id': tenancy_rec.currency_id.id or False,
                    'rel_tenant_id': tenancy_rec.tenant_id.id
                })
        elif tenancy_rec.rent_type_id.renttype != 'Weekly':
            if tenancy_rec.rent_type_id.renttype == 'Monthly':
                interval = int(tenancy_rec.rent_type_id.name)
            if tenancy_rec.rent_type_id.renttype == 'Yearly':
                interval = int(tenancy_rec.rent_type_id.name) * 12
            d1 = tenancy_rec.date_start
            d2 = tenancy_rec.date
            diff = abs((d1.year - d2.year) * 12 + (d1.month - d2.month))
            # whole rent periods and the leftover months
            tot_rec = diff // interval
            tot_rec2 = diff % interval
            if abs(d1.month - d2.month) >= 0 and d1.day < d2.day:
                tot_rec2 += 1
            if diff == 0:
                tot_rec2 = 1
            if tot_rec > 0:
                for rec in range(tot_rec):
                    rent_obj.create({
                        'start_date': d1.strftime(
                            DEFAULT_SERVER_DATE_FORMAT),
                        'amount': amount * interval or 0.0,
                        'property_id': tenancy_rec.property_id and
                        tenancy_rec.property_id.id or False,
                        'tenancy_id': tenancy_rec.id,
                        'currency_id': tenancy_rec.currency_id.id or False,
                        'rel_tenant_id': tenancy_rec.tenant_id.id
                    })
                    d1 = d1 + relativedelta(months=interval)
            if tot_rec2 > 0:
                rent_obj.create({
                    'start_date': d1.strftime(DEFAULT_SERVER_DATE_FORMAT),
                    'amount': amount * tot_rec2 or 0.0,
                    'property_id': tenancy_rec.property_id and
                    tenancy_rec.property_id.id or False,
                    'tenancy_id': tenancy_rec.id,
                    'currency_id': tenancy_rec.currency_id.id or False,
                    'rel_tenant_id': tenancy_rec.tenant_id.id
                })
    return self.write({'rent_entry_chck': True})
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_line(self, buttoninstance):\r\n #del and create again to respect the order\r\n self.ids.inlayout.remove_widget(self.add_button)\r\n self.ids.inlayout.remove_widget(self.del_button)\r\n #create the new line\r\n store = get_store()\r\n lastval = store.get('Nbtimecompound')[\"value\"]\r\n store.put('Nbtimecompound', value=1+lastval)\r\n self.ids.inlayout.rows = 5 + store.get('Nbtimecompound')[\"value\"]\r\n #add the widget\r\n newval = str(store.get('Nbtimecompound')[\"value\"])\r\n timecompount = CEToolBoxLabel(text=\"Time compound \"+newval)\r\n timecompountvalue = CEToolBoxTextInput(text=str(1.0),\r\n id='Timecompound'+newval)\r\n timecompountunit = CEToolBoxSpinner(text=u\"min\",\r\n id='Timecompound'+newval+'Unit', \r\n values=[\"s\", \"min\"])\r\n store.put('Timecompound'+newval, value=1.0, unit=\"min\")\r\n self.ids.inlayout.add_widget(timecompount)\r\n self.ids.inlayout.add_widget(timecompountvalue)\r\n self.ids.inlayout.add_widget(timecompountunit)\r\n tosave = [timecompount, timecompountvalue, timecompountunit]\r\n self.timecompoundlist.append(tosave)\r\n #recreate the button\r\n self.add_button = CEToolBoxButton(text=\"Add\", id=\"addbutton\", on_release=self.add_line)\r\n self.ids.inlayout.add_widget(self.add_button)\r\n self.del_button = CEToolBoxButton(text=\"Del\", id=\"delbutton\", on_release=self.del_line)\r\n self.ids.inlayout.add_widget(self.del_button)\r\n self.ids.inlayout.rows = 5 + store.get('Nbtimecompound')[\"value\"]\r\n #force the good size\r\n self.ids.tscrollview.change_child_height(self.ids.tscrollview.height)", "def display_round_creation(self) -> None:\n labels = [value for value in cf.LABELS_ROUND_CREATION.values()]\n for index, elem in enumerate(labels):\n self.widgets.append(self.my_line2(self.master, elem, index, 0, 1, 1, 10, 10))\n self.widgets.append(self.my_button(self.master, 'créer le tour', 1, len(labels), self.create_new_round))", "def constructTimeLineItem(self):\n\t\treturn", "def create( self ):\r\n for rsrc in self.ee.getRsrcs( ):\r\n self.schedule[rsrc.getid( )] = [ ]", "def create(self, box, row):\n lbl = QLabel(\"-or-\")\n box.addWidget(lbl, row, 0)\n create = QPushButton(\"Create/Edit station list\")\n create.clicked.connect(self.createRSL)\n create.setToolTip('Create a list of stations for which to download ' +\n 'soundings')\n box.addWidget(create, row, 1, 1, 2)", "def _create_schedules(self):\n\n ''''''", "def createLinesWidgets(self):\r\n lineEditWidgets = {\r\n \"FECHA\": (0, 1, 1, 2),\r\n \"HORA\": (1, 1, 1, 2),\r\n \"NOMBRE\": (2, 1, 1, 2),\r\n \"CUMPLEAÑOS\": (3, 1),\r\n \"CELULAR\": (4, 1, 1, 2),\r\n \"DIRECCIÓN\": (5, 1, 1, 2),\r\n \"POLLO\": (9, 1, 1, 2),\r\n \"CARNE\": (10, 1, 1, 2),\r\n \"EMPANACHOS\": (11, 1, 1, 2),\r\n \"CANTIDAD DE EMPA\": (12, 1, 1, 2),\r\n \"VALOR\": (13, 1),\r\n\r\n }\r\n self.lineEditWidgets = {}\r\n for widgetName, position in lineEditWidgets.items():\r\n if len(position) == 2:\r\n self.lineEdit = QLineEdit()\r\n self.lineEdit.setReadOnly(True)\r\n\r\n self.lineEdit.setStyleSheet(\"\"\"\r\n font-family: times;\r\n font-size: large;\r\n background-color : #A8DBC5;\r\n border: 2px solid white;\r\n border-radius: 5px;\r\n font-size: 15px;\r\n \"\"\")\r\n self.grid.addWidget(self.lineEdit, position[0], position[1])\r\n self.lineEditWidgets[widgetName] = self.lineEdit\r\n else:\r\n self.lineEdit = QLineEdit()\r\n\r\n self.lineEdit.setStyleSheet(\"\"\"\r\n font-family: times;\r\n font-size: large;\r\n background-color : #A8DBC5;\r\n border: 2px solid white;\r\n border-radius: 5px;\r\n font-size: 
15px;\r\n \"\"\")\r\n if widgetName == \"CANTIDAD DE EMPA\":\r\n self.lineEdit.setReadOnly(True)\r\n self.grid.addWidget(\r\n self.lineEdit, position[0], position[1], position[2], position[3])\r\n self.lineEditWidgets[widgetName] = self.lineEdit\r\n\r\n # Getting the current date and setting its label\r\n self.today = str(date.today())\r\n self.lineEditWidgets[\"FECHA\"].setText(self.today)\r\n # Using QTimer and its signal to actualize the hour Widget\r\n timer = QTimer(self.tab1)\r\n timer.timeout.connect(lambda: self.getHour(self.tab1))\r\n timer.start(1000)", "def marathon(self):\n self.buttons = []\n self.screen.blit(self.background_image, (0, 0))\n button_height = 200\n button_width = 200\n row_height = self.height // 2 - button_height\n row_starting_width = self.width // 10\n # First line of buttons\n for i in range(5):\n self.create_button((row_starting_width * (3 + (i - 1) * 2) - 100, row_height),\n button_width,\n button_height,\n Colors.BLACK,\n str(i))\n # Second line of buttons\n row_height = row_height + button_height + 100\n for i in range(5):\n self.create_button((row_starting_width * (3 + (i - 1) * 2) - 100, row_height),\n button_width,\n button_height,\n Colors.BLACK,\n str(i + 5))\n self.show_buttons()\n self.show_text_in_buttons()\n pygame.display.flip()", "def generate_rapel(self):\n config = self.env['ka_hr_payroll.config'].default_config()\n last_period = self.get_last_period(self.status_id.id, self.company_payroll_id.id, config=config)\n if last_period:\n date_done = datetime.strptime(self.date_done, DATETIME_FORMAT)\n\n if date_done.day > config.date_end:\n date_pay = date_done + relativedelta(months=1)\n else:\n date_pay = date_done\n\n data_rapel = {\n 'new_period_id': self.id,\n 'old_period_id': last_period.id,\n 'date_start': get_utc_timezone(self.date_start + ' 00:00:00'),\n 'date_end': self.date_done,\n 'year_pay': str(date_pay.year),\n 'month_pay': date_pay.month,\n 'status_id': self.status_id.id,\n 'company_payroll_id': self.company_payroll_id.id,\n }\n\n rapel_period = self.env['ka_hr_payroll.rapel.tunjangan.khusus.period'].create(data_rapel)\n self.rapel_id = rapel_period\n\n for line in self.line_ids:\n line.generate_rapel(last_period.id, rapel_period.id)\n\n self.state_rapel = '2'\n self.env.user.notify_info(\"{0}, berhasil dibuat!\".format(rapel_period.name))\n else:\n raise ValidationError(\n \"Tunjangan khusus periode sebelumnya tidak ditemukan! 
Anda tidak bisa melanjutkan aksi ini.\")", "def newInsertionNeedleSet(self):\n #productive #onButton\n profbox()\n widget = slicer.modules.NeedleFinderWidget\n if widget.newInsertionButton:\n dialog = qt.QDialog()\n messageBox = qt.QMessageBox.information( dialog, 'Information','You are creating a new set of needles')\n self.round +=1\n widget.newInsertionButton.setText('Start a new set of needles - Round ' + str(self.round+1)+'?')\n widget.deleteNeedleButton.setText('Delete Needles from round ' + str(self.round))", "def schedule_paragraph():", "def push_button_add_date_clicked(self) -> None:\n date_creator = DateCreatorWindow(self)\n while True:\n date_creator.exec_()\n create_date = date_creator.get_date()\n if create_date is not None:\n try:\n self._dates.add_date(create_date)\n self.update_list_widget_date()\n break\n except InvalidDatePair as ex:\n QMessageBox.critical(self,\n self.tr(\"Invalid date pair\"),\n str(ex))\n else:\n break", "def createRSL(self):\n self.creator = RSLCreator(self.request, self.log)\n self.creator.show()", "def create_month_scr(self, month, toogle_today=False):\n\n scr = Screen()\n m = self.month_names_eng[self.active_date[1] - 1]\n scr.name = \"%s-%s\" % (m, self.active_date[2]) # like march-2015\n\n # Grid for days\n grid_layout = GridLayout(cols=7, rows=7, size_hint=(1, 1), pos_hint={\"top\": 1})\n scr.add_widget(grid_layout)\n\n # Days abbrs\n for i in range(7):\n if i >= 5: # weekends\n l = Label(text=self.days_abrs[i], color=(1, 0, 0, 1))\n else: # work days\n l = Label(text=self.days_abrs[i], text_size=(self.size[0], None), halign=\"center\")\n\n grid_layout.add_widget(l)\n\n global holiday, halfday\n\n # Buttons with days numbers\n for week in month:\n for day in week:\n if day[1] >= 6: # weekends\n self.tbtn = ToggleBtn(text=str(day[0]), color=(0, 0, 0, 1))\n else:\n self.tbtn = ToggleBtn(text=str(day[0]), color=(0, 0, 0, 1))\n for i in range(len(holiday)):\n if self.active_date[2] == holiday[i][2]:\n if self.active_date[1] == holiday[i][1]:\n if day[0] == holiday[i][0]:\n self.tbtn.background_color=(128, 0, 128, 1)\n for i in range(len(halfday)):\n if self.active_date[2] == halfday[i][2]:\n if self.active_date[1] == halfday[i][1]:\n if day[0] == halfday[i][0]:\n self.tbtn.background_color=(0, 255, 255, 0.5)\n\n self.tbtn.bind(on_press=self.get_btn_value)\n\n if toogle_today:\n # Down today button\n if day[0] == self.active_date[0] and day[2] == 1:\n self.tbtn.state = \"down\"\n # Disable buttons with days from other months\n if day[2] == 0:\n self.tbtn.text = \" \"\n self.tbtn.disabled = True\n self.tbtn.background_color = (0, 0, 0, 0.1)\n\n grid_layout.add_widget(self.tbtn)\n\n self.sm.add_widget(scr)", "def generate_rapel(self, last_period_id, rapel_period_id):\n last_lines = self.get_last_lines(last_period_id, self.combine_id.id)\n data_rapel_lines = {\n 'rapel_period_id': rapel_period_id,\n 'new_period_lines_id': self.id,\n 'new_value': self.value,\n 'combine_id': self.combine_id.id,\n }\n\n if last_lines:\n data_rapel_lines['old_period_lines_id'] = last_lines.id\n data_rapel_lines['old_value'] = last_lines.value\n\n self.env['ka_hr_payroll.rapel.tunjangan.khusus.period.lines'].create(data_rapel_lines)", "def newInsertionNeedleSet(self):\r\n # productive #onButton\r\n profbox()\r\n widget = slicer.modules.NeedleFinderWidget\r\n if widget.newInsertionButton:\r\n dialog = qt.QDialog()\r\n messageBox = qt.QMessageBox.information(dialog, 'Information', 'You are creating a new set of needles')\r\n self.round += 1\r\n 
widget.newInsertionButton.setText('Start a new set of needles - Round ' + str(self.round + 1) + '?')\r\n widget.deleteNeedleButton.setText('Delete Needles from round ' + str(self.round))", "def btn_create_reco(self):\n\t\tprint()\n\t\tprint('OH - btn_create_reco')\n\n\t\t# Init\n\t\tres_id = self.id\n\t\tres_model = _model_treatment\n\t\tview_id = self.env.ref('openhealth.treatment_2_form_view').id\n\n\t\t# Open\n\t\treturn {\n\t\t\t# Mandatory\n\t\t\t'type': _model_action,\n\t\t\t'name': 'Open Treatment Current',\n\t\t\t# Window action\n\t\t\t'priority': 1,\n\t\t\t'res_id': res_id,\n\t\t\t'res_model': res_model,\n\t\t\t#'view_id': view_id,\n\t\t\t# Views\n\t\t\t#\"views\": [[False, \"form\"]],\n\t\t\t\"views\": [[view_id, \"form\"]],\n\t\t\t'view_mode': 'form',\n\t\t\t'target': 'current',\n\t\t\t#\"domain\": [[\"patient\", \"=\", self.patient.name]],\n\t\t\t#'auto_search': False,\n\t\t\t'flags': {\n\t\t\t\t\t\t#'form': {'action_buttons': True, 'options': {'mode': 'edit'}}\n\t\t\t\t\t\t'form': {'action_buttons': False, }\n\t\t\t\t\t},\n\t\t\t'context': {\n\t\t\t\t\t\t#'default_treatment': treatment_id,\n\t\t\t\t\t}\n\t\t}", "def action_generate_lines_txt(self):\n rp_obj = self.env['res.partner']\n voucher_obj = self.env['account.wh.iva']\n txt_iva_obj = self.env['txt.iva.line']\n vouchers = []\n txt_brw = self.browse(self._ids)[0]\n txt_ids = txt_iva_obj.search([('txt_id', '=', txt_brw.id)])\n if txt_ids:\n txt_ids.unlink()\n\n if txt_brw.type:\n vouchers = voucher_obj.search([\n ('date_ret', '>=', txt_brw.date_start),\n ('date_ret', '<=', txt_brw.date_end),\n ('period_id', '=', txt_brw.period_id.id),\n ('state', '=', 'done'),\n ('type', 'in', ['in_invoice', 'in_refund'])])\n else:\n vouchers = voucher_obj.search([\n ('date_ret', '>=', txt_brw.date_start),\n ('date_ret', '<=', txt_brw.date_end),\n ('period_id', '=', txt_brw.period_id.id),\n ('state', '=', 'done'),\n ('type', 'in', ['out_invoice', 'out_refund'])])\n\n for voucher in vouchers:\n acc_part_id = rp_obj._find_accounting_partner(voucher.partner_id)\n for voucher_lines in voucher.wh_lines:\n if voucher_lines.invoice_id.state not in ['open', 'paid']:\n continue\n for voucher_tax_line in voucher_lines.tax_line:\n txt_iva_obj.create(\n {'partner_id': acc_part_id.id,\n 'voucher_id': voucher.id,\n 'invoice_id': voucher_lines.invoice_id.id,\n 'txt_id': txt_brw.id,\n 'untaxed': voucher_tax_line.base,\n 'amount_withheld': voucher_tax_line.amount_ret,\n 'tax_wh_iva_id': voucher_tax_line.id,\n })\n return True", "def placeCalendarButton(data,row,target,path,alts,**kwargs):\n# printPretty(\"args: %s %s %s %s\" % (data,row,target,path))\n datebut = gtk.Button()\n datebut.show()\n image = gtk.Image()\n image.set_from_file(\"img/date.png\")\n datebut.set_image(image)\n datebut.unset_flags(gtk.CAN_FOCUS)\n datebut.connect(\"clicked\",dateChoose,target,data,path,alts,kwargs)\n datebut.set_tooltip_text(\"Click to choose date from calendar\")\n row.pack_start(datebut,0,0,2)", "def create(self, vals):\n lines = super(KaHrPayrollTunjanganKhususPeriodLines, self).create(vals)\n if not 'name' in vals or not vals.get('name'):\n date_obj = datetime.strptime(lines.period_id.date_start, DATE_FORMAT)\n date_str = date_obj.strftime('%d-%m-%Y')\n lines.name = \"Detail {0}. 
Periode: {1}\".format(lines.combine_id.name, date_str)\n return lines", "def __createButton(self):\r\n self.button = QPushButton(\"Plot\") # text diplayed on the button\r\n self.button.setShortcut(\"Ctrl+P\") # adding a shortcut \r\n self.button.clicked.connect(self.__onClick) # connect it to the __onClick function\r", "def makeRoastTabs(ID, numbers):\n newFromTemplate(ID, TODAY)\n continueFromLastPR(ID, 1)\n newFromTemplate(ID, TOMORROW)\n populateBatches(ID, 2, numbers)", "def _createline(self):\n return self.cv.create_line(0, 0, 0, 0, fill=\"\", width=2,\n capstyle = TK.ROUND)", "def _btnClicked(self):\n # Check if all fields were filled\n name = self.nameEntry.get().strip()\n start = self.startCombo.get()\n end = self.endCombo.get()\n day = self.dayCombo.get()\n type = self.typeCombo.get()\n\n # Check missing values\n if name == \"\" or start == \"\" or end == \"\" or day == \"\" or type == \"\":\n messagebox.showwarning(\"Missing fields!\", \"Please fill the missing fields!\")\n return\n if type in Constants.TASKS[0:3]:\n subject = self.subjectCombo.get()\n if subject == \"\":\n messagebox.showwarning(\"Missing fields!\", \"Please fill the missing fields!\")\n return\n else:\n subject = None\n\n # Check if starts earlier than end\n if Time.TIMELIST.index(start) >= Time.TIMELIST.index(end):\n messagebox.showwarning(\"Time error\", \"Make sure that time is correct!\")\n return\n\n # Check time\n started = False\n index = 0\n for t in Time.HOURS:\n # Create an event for each hour\n if t[0:5] == start: started = True\n if t[8:] == end:\n started = False\n # Add one last event\n if subject == \"\":\n event = Event(name, start, end, day, type)\n else:\n event = Event(name, start, end, day, type, subject)\n Database.insert(day, index, event)\n\n if started:\n # Create the Event object\n if subject == \"\":\n event = Event(name, start, end, day, type)\n else:\n event = Event(name, start, end, day, type, subject)\n Database.insert(day, index, event)\n\n index += 1\n\n self.destroyFrame()", "def RoadRiverTab():\n\n # Main layout\n mainTab = cmds.columnLayout(adjustableColumn=True, columnAttach=('both', 20))\n \n ### Geo parameters\n cmds.separator(height=10, style=\"none\")\n cmds.text(label=\"Generate road and rivers:\", align=\"left\")\n RoadRiverTab.roadWidth = cmds.floatSliderGrp(label=\"Road Width\", field=True, value=1, min=.01, max=100)\n RoadRiverTab.roadQuality = cmds.intSliderGrp(label=\"Curve Quality\", field=True, value=20, min=2, max=100)\n\n ### Choose which king of geo\n cmds.separator(height=5, style=\"none\")\n cmds.rowLayout(numberOfColumns=3, adjustableColumn=2)\n cmds.button(label='Create Road', width=200, command=buildRoad)\n cmds.separator(style=\"none\")\n cmds.button(label=\"Create River\", width=200, command=buildRiver)\n\n cmds.setParent('..')\n cmds.setParent('..')\n\n return mainTab", "def __button_routes_line_color_clicked(self):\n color = QColorDialog.getColor()\n if color.isValid():\n self.vis.change_route_line_color(color.name())", "def _create(self, __button):\r\n\r\n _starttime = 0.01\r\n if self.chkIncludeZeroHour.get_active():\r\n _starttime = 0.0\r\n\r\n _window = self.assistant.get_root_window()\r\n _window.set_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))\r\n\r\n self._user_log.info('The following records contained inconsistent '\r\n 'information and were not used in the '\r\n 'creation of the data set:\\n')\r\n\r\n (_records, __) = self._get_unit_records(_starttime)\r\n\r\n # Load the results into the survival data table in the RTK Program\r\n # database or write 
the results to the open file.\r\n if self.optDatabase.get_active():\r\n (_error_codes,\r\n _n_inconsistent) = self._create_database_dataset(_records)\r\n print _error_codes\r\n else:\r\n _error_codes = self._create_file_dataset(_records)\r\n\r\n _window.set_cursor(gtk.gdk.Cursor(gtk.gdk.LEFT_PTR))\r\n\r\n if _n_inconsistent > 0:\r\n Widgets.rtk_information(_(u\"There were {0:d} records with \"\r\n u\"inconsistent information. These \"\r\n u\"were not used in the creation of \"\r\n u\"the dataset. Please see file \"\r\n u\"{1:s} for \"\r\n u\"details.\".format(\r\n _n_inconsistent,\r\n Configuration.LOG_DIR +\r\n 'RTK_error.log')))\r\n\r\n # Load the dataset gtk.TreeView with the newly created dataset if it\r\n # was created in the RTK Program database.\r\n #if self.optDatabase.get_active():\r\n # self._mdcRTK.load_tree()\r\n # self._app.DATASET.load_notebook()\r\n # _page = sum(Configuration.RTK_MODULES[:11])\r\n # self._app.winTree.notebook.set_current_page(_page)\r\n\r\n return False", "def pl_create_order(self):\n\tprint()\n\tprint('Pl - Create Order')\n\n\n\tpartner = self.env['res.partner'].search([\n\t\t\t\t\t\t\t\t\t\t\t\t\t('name', '=', self.patient.name),\n\t\t\t\t\t\t\t\t\t\t\t\t],\n\t\t\t\t\t\t\t\t\t\t\t\t#order='appointment_date desc',\n\t\t\t\t\t\t\t\t\t\t\t\tlimit=1,)\n\n\n\t# Create Order\n\torder = self.env['sale.order'].create({\n\t\t\t\t\t\t\t\t\t\t\t\t\t'state':'draft',\n\t\t\t\t\t\t\t\t\t\t\t\t\t'x_doctor': self.physician.id,\n\n\t\t\t\t\t\t\t\t\t\t\t\t\t#'partner_id': self.partner_id.id,\n\t\t\t\t\t\t\t\t\t\t\t\t\t'partner_id': partner.id,\n\t\t\t\t\t\t\t\t\t\t\t\t\t#'x_ruc': self.partner_id.x_ruc,\n\t\t\t\t\t\t\t\t\t\t\t\t\t#'x_dni': self.partner_id.x_dni,\n\n\t\t\t\t\t\t\t\t\t\t\t\t\t'patient': self.patient.id,\n\t\t\t\t\t\t\t\t\t\t\t\t\t'x_id_doc': self.patient.x_id_doc,\n\t\t\t\t\t\t\t\t\t\t\t\t\t'x_id_doc_type': self.patient.x_id_doc_type,\n\t\t\t\t\t\t\t\t\t\t\t\t\t'x_family': 'procedure',\n\n\t\t\t\t\t\t\t\t\t\t\t\t\t'treatment': self.id,\n\t\t\t\t\t\t\t\t\t\t\t\t})\n\t#print(order)\n\n\n\n\t# Create Order Lines\n\tfor cart_line in self.shopping_cart_ids:\n\n\t\tproduct = cart_line.product\n\n\t\t#print(product)\n\t\t#print(product.name)\n\n\t\t# Create Order Line\n\t\tol = order.order_line.create({\n\t\t\t\t\t\t\t\t\t\t'name': \t\tproduct.name,\n\t\t\t\t\t\t\t\t\t\t'product_id': \tproduct.id,\n\t\t\t\t\t\t\t\t\t\t'price_unit': \tcart_line.price,\n\t\t\t\t\t\t\t\t\t\t'product_uom_qty': cart_line.qty,\n\t\t\t\t\t\t\t\t\t\t'order_id': \torder.id,\n\t\t\t\t\t\t\t\t\t})\n\treturn order\n\n\t# pl_create_order", "def __init__(self):\n self.label = \"LineToFar77\"\n self.description = \"\"\n self.canRunInBackground = False", "def add_timeline(self, t):\n\n self.timelines.update({t.name : t})", "def ttint(timelist,venue):\n #setup\n showturtle()\n #make python turtle graphics window 1260 pixels wide and 800 pixels tall\n setup(width = 1260, height = 800, startx = None, starty = None)\n reset()\n #text at top\n pen(pencolor=\"black\")\n pu()\n setpos(0,380)\n write(\"Welcome to your schedule. 
Use the arrow keys to toggle the day of the week\",move=False,align=\"center\",font=(\"Courier New\",10,\"normal\"))\n setpos(0,360)\n write(\"In Idle, type 'quit()' to exit turtle.\",move=False,align=\"center\",font=(\"Courier New\",10,\"normal\"))\n dayl = [\"Mon\",\"Tue\",\"Wed\",\"Thu\",\"Fri\",\"Sat\",\"Sun\"]\n setpos(0,-350)\n #writes venue at bottom of GUI\n write(venue,move=False,align=\"center\",font=(\"Courier New\",20,\"normal\"))\n #drawing the lines and timing\n #baseY = 300 because y = 300 is the height of the line for monday\n baseY = 300\n for ch in range(7):\n pu()\n #goes to relevant y position for respective day code\n setpos(-570,(baseY-(100*ch)))\n #writes day name at side\n write(dayl[ch],move=False,align=\"center\",font=(\"Courier New\",20,\"normal\"))\n pen(pencolor=\"black\",pensize=\"3\")\n #draws lines\n #for each hour\n for dh in range(19):\n #move right 60 steps\n setx(xcor()+60)\n pd()\n #move up 20 steps\n sety(ycor()+20)\n pu()\n #stop drawing. move up 10 steps and write hour\n sety(ycor()+10)\n write(str((600+(dh*100))),move=False,align=\"center\",font=(\"Courier New\",10,\"normal\"))\n #go back down 30 steps to main line\n sety(ycor()-30)\n #continue drawing\n pd()\n pu()\n #goes to each relevant timing to write module code\n #for every time range in timelist. dp stands for day parse\n for dp in range(len(timelist)):\n #if week day in timelist is not empty\n if len(timelist[dp]) >= 1:\n #for each timing in the week day. hp stands for hour parse\n for hp in range(1,len(timelist[dp])):\n #for each hour in the time range. pr is an arbitrary variable which helps to direct the turtle to the timings in between the start and end time to write the module code at the relevant location\n for pr in range(int((timelist[dp][hp][1]-timelist[dp][hp][0])/100)):\n #go to the relevant time and write the module code in between\n setpos((-840+(int(timelist[dp][hp][0]/100)+pr)*60),(410-timelist[dp][0]*100))\n write(timelist[dp][hp][2],move=False,align=\"center\",font=(\"Courier New\",8,\"normal\"))", "def save(self, *args, **kwargs):\n if not self.pk:\n self.start_time_rent = datetime.date.today()\n self.end_time_rent = self.start_time_rent + datetime.timedelta(days=7)\n self.reservation.isrented = True\n self.reservation.save()\n return super(Rental, self).save(*args, **kwargs)", "def landlord_button_start(self):\n if self._context.get('is_landlord_rent'):\n self.write({'state': 'open', 'rent_entry_chck': False})", "def fill_active(self, layout):\n self.twin_triggers = QLineEdit()\n layout.addRow(\"twin triggers\", self.twin_triggers)\n self.twin_halves = QLineEdit()\n layout.addRow(\"twin halves\", self.twin_halves)", "def create_line(self):\n if self.hosts and self.line:\n self.msg(\"There is a line here already.\")\n self.display_line()\n return\n self.line = []\n other_hosts = [self.caller.search(arg) for arg in self.lhslist]\n other_hosts = [ob for ob in other_hosts if ob and ob.player]\n other_hosts.append(self.caller)\n self.hosts = other_hosts\n if \"loop\" in self.switches:\n self.toggle_loop()\n self.display_line()", "def insert_task_buttons(note,i):\n txt_start_button = open(PATH_GRAPH_UTILS + 'init_buttons.py', 'r', encoding='utf-8').read()\n note['cells'] += [nb.v4.new_code_cell(txt_start_button.replace('$i$',i))]\n note.cells[-1].metadata = {\"init_cell\": True}", "def click_create_new_statement_button(self):\n self.click_element(self.create_new_statement_button_locator)", "def change_to_tasks(self):\n self.ids[\"shp_btn\"].color = 1, 1, 1, 0.5", "def 
schedule_reservation(reservation_date,reservation_time,party_size,restaurant_name,first_name,restaurant_address):\n creds = None\n # The file token.pickle stores the user's access and refresh tokens, and is\n # created automatically when the authorization flow completes for the first\n # time.\n if os.path.exists('token.pickle'):\n with open('token.pickle', 'rb') as token:\n creds = pickle.load(token)\n # If there are no (valid) credentials available, let the user log in.\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file(\n 'credentials.json', SCOPES)\n creds = flow.run_local_server(port=0)\n # Save the credentials for the next run\n with open('token.pickle', 'wb') as token:\n pickle.dump(creds, token)\n\n service = build('calendar', 'v3', credentials=creds)\n\n # Call the Calendar API\n now = datetime.datetime.utcnow()\n\n reservation_day=reservation_date.split('/')[0]\n reservation_month =reservation_date.split('/')[1]\n reservation_year =reservation_date.split('/')[2]\n reservation_date = reservation_year+'-'+reservation_month+'-'+reservation_day\n start_time_hr= reservation_time[:2]\n end_time_hr= int(reservation_time[:2])+4\n start_time_min= reservation_time[2:]\n end_time_min=start_time_min\n \n \n event = {\n 'summary': 'Reservation at '+restaurant_name,\n 'location': restaurant_address,\n 'description': 'Reservation for '+party_size+' under '+first_name+' made on '+str(now),\n 'start': {\n 'dateTime': reservation_date+'T'+start_time_hr+':'+start_time_min+':00+08:00',\n 'timeZone': 'Asia/Singapore',\n },\n 'end': {\n 'dateTime': reservation_date+'T'+str(end_time_hr)+':'+end_time_min+':00+08:00',\n 'timeZone': 'Asia/Singapore',\n },\n 'reminders': {\n 'useDefault': False,\n 'overrides': [\n {'method': 'email', 'minutes': 24 * 60},\n {'method': 'popup', 'minutes': 10},\n ],\n },\n }\n\n event = service.events().insert(calendarId='primary', body=event).execute()\n print ('Event created: %s', (event.get('htmlLink')))", "async def create_kline_tasks(self, kline_type, *args, **kwargs):\n for index, symbol in enumerate(self._symbols):\n asyncio.get_event_loop().call_later(index, self.delay_kline_update, symbol, kline_type)", "def _handle_create_line(self, axes, style_args):\n stream_data = self.server.stream_data\n # sample data for initial create\n x_data = numpy.arange(0, 2, 1)\n y_data = numpy.array([0]*2)\n\n line, = axes.plot(x_data, y_data, '-', **style_args)\n # NOTE: client may set 'label'\n line_name = style_args['label']\n if line_name in stream_data:\n # preserve old line data with a new name\n stream_data[line_name+\"_old_\"+timestamp()] = stream_data[line_name]\n # always start with no data for the new line\n stream_data[line_name] = {'y': [], 'line': line, 'last_len': 0}\n if FLAGS.timestamp:\n stream_data[line_name]['x'] = []\n return line_name", "def create_widgets(self):\n #create first button\n self.button1 = Button(self, text = \"Start\")\n self.button1.bind\n self.button1.grid()", "def create_work_item(self):", "def create_order(self):\n\tprint()\n\tprint('OH - pl_create_order')\n\n\t# Search Partner\n\tprint()\n\tprint('Search partner')\n\tpartner = self.env['res.partner'].search([\n\t\t\t\t\t\t\t\t\t\t\t\t\t('name', '=', self.patient.name),\n\t\t\t\t\t\t\t\t\t\t\t\t],\n\t\t\t\t\t\t\t\t\t\t\t\t#order='appointment_date desc',\n\t\t\t\t\t\t\t\t\t\t\t\tlimit=1,)\n\n\t# Search Pl\n\tprint()\n\tprint('Search pricelist')\n\tpricelist = 
self.env['product.pricelist'].search([\n\t\t\t\t\t\t\t\t\t\t\t#('active', 'in', [True]),\n\t\t\t\t\t\t\t\t\t\t\t],\n\t\t\t\t\t\t\t\t\t\t\t#order='x_serial_nr asc',\n\t\t\t\t\t\t\t\t\t\t\tlimit=1,\n\t\t\t\t\t\t\t\t\t\t)\n\tprint(pricelist)\n\n\t# Create Order\n\torder = self.env['sale.order'].create({\n\t\t\t\t\t\t\t\t\t\t\t\t\t'state':'draft',\n\t\t\t\t\t\t\t\t\t\t\t\t\t'x_doctor': self.physician.id,\n\n\t\t\t\t\t\t\t\t\t\t\t\t\t#'partner_id': self.partner_id.id,\n\t\t\t\t\t\t\t\t\t\t\t\t\t'partner_id': partner.id,\n\t\t\t\t\t\t\t\t\t\t\t\t\t#'x_ruc': self.partner_id.x_ruc,\n\t\t\t\t\t\t\t\t\t\t\t\t\t#'x_dni': self.partner_id.x_dni,\n\n\t\t\t\t\t\t\t\t\t\t\t\t\t'patient': self.patient.id,\n\t\t\t\t\t\t\t\t\t\t\t\t\t'x_id_doc': self.patient.x_id_doc,\n\t\t\t\t\t\t\t\t\t\t\t\t\t'x_id_doc_type': self.patient.x_id_doc_type,\n\t\t\t\t\t\t\t\t\t\t\t\t\t'x_family': 'procedure',\n\n\t\t\t\t\t\t\t\t\t\t\t\t\t'treatment': self.id,\n\n\t\t\t\t\t\t\t\t\t\t\t\t\t'pricelist_id': pricelist.id,\n\t\t\t\t\t\t\t\t\t\t\t\t})\n\t#print(order)\n\n\n\n\t# Create Order Lines\n\tfor cart_line in self.shopping_cart_ids:\n\n\t\tproduct = cart_line.product\n\n\t\t#print(product)\n\t\t#print(product.name)\n\n\t\t# Create Order Line\n\t\tol = order.order_line.create({\n\t\t\t\t\t\t\t\t\t\t'name': \t\tproduct.name,\n\t\t\t\t\t\t\t\t\t\t'product_id': \tproduct.id,\n\t\t\t\t\t\t\t\t\t\t'price_unit': \tcart_line.price,\n\t\t\t\t\t\t\t\t\t\t'product_uom_qty': cart_line.qty,\n\t\t\t\t\t\t\t\t\t\t'order_id': \torder.id,\n\t\t\t\t\t\t\t\t\t})\n\treturn order", "def schedule_task(self, name, date):\n pass", "def del_line(self, buttoninstance):\r\n try:\r\n widgets = self.timecompoundlist.pop()\r\n except IndexError:\r\n return\r\n for w in widgets:\r\n self.ids.inlayout.remove_widget(w)\r\n #del the line in the jsonfile\r\n store = get_store()\r\n lastval = store.get('Nbtimecompound')[\"value\"]\r\n store.delete('Timecompound'+str(lastval))\r\n store.put('Nbtimecompound', value=lastval-1)\r\n self.ids.inlayout.rows = 5 + store.get('Nbtimecompound')[\"value\"]\r\n #force the good size\r\n self.ids.tscrollview.change_child_height(self.ids.tscrollview.height)", "def create_grid(self):\n row = 0\n col = 0\n for row in range(self._dim):\n for col in range(self._dim):\n x1 = col*self._cell_dim # bottom left\n y1 = row * self._cell_dim # top left\n x2 = x1 + self._cell_dim # bottom right\n y2 = y1 + self._cell_dim # top right\n self.rect[row,col] = self.canvas.create_rectangle(x1,y1,x2,y2, fill=self._primary_color, outline=self._grid_lines_color, tags=\"rect\")\n self.canvas.tag_bind(self.rect[row, col], '<ButtonPress-1>', self.change_cell)\n col = 0\n row += 1\n if self._dim < 50:\n button_size = int(80*(self._dim/50))\n font_size = int(22*(self._dim/50))\n else:\n button_size = 80\n font_size = 18\n x1 = col * self._cell_dim + (((self._dim*self._cell_dim) - button_size*3)//2)\n y1 = row * self._cell_dim + 5\n x2 = x1 + button_size\n y2 = y1 + 20\n self.canvas.create_oval(x1,y1,x2,y2, tags=\"toggle\", fill=self._primary_color)\n self.canvas.create_text(x1+(button_size//2), y1+10, tags=\"toggle-text\", fill=self._secondary_color, text=\"Start\", font=(\"Courier\", font_size))\n self.canvas.tag_bind(\"toggle\", '<ButtonPress-1>', self.toggle_refresh)\n self.canvas.tag_bind(\"toggle-text\", '<ButtonPress-1>', self.toggle_refresh)\n x1 = x2 + 5 # padding between buttons\n x2 = x1 + button_size\n self.canvas.create_oval(x1,y1,x2,y2, tags=\"next\", fill=self._primary_color)\n self.canvas.create_text(x1+(button_size//2), y1+10, 
tags=\"next-text\", fill=self._secondary_color, text=\"Next\", font=(\"Courier\", font_size))\n self.canvas.tag_bind(\"next\", '<ButtonPress-1>', self.one_step)\n self.canvas.tag_bind(\"next-text\", '<ButtonPress-1>', self.one_step)\n x1 = x2 + 5 # padding between buttons\n x2 = x1 + button_size\n self.canvas.create_oval(x1,y1,x2,y2, tags=\"clear\", fill=self._primary_color)\n self.canvas.create_text(x1+(button_size//2), y1+10, tags=\"clear-text\", fill=self._secondary_color, text=\"Clear\", font=(\"Courier\", font_size))\n self.canvas.tag_bind(\"clear\", '<ButtonPress-1>', self.clear_board)\n self.canvas.tag_bind(\"clear-text\", '<ButtonPress-1>', self.clear_board)\n self.model_refresh()", "def schedule_text():", "def set_button_to_create(self):\n self.configuration.disable_validations = False\n self.create_tool_button.removeAction(self.set_button_to_create_action)\n self.create_tool_button.removeAction(self.edit_command_action)\n self.create_tool_button.addAction(\n self.set_button_to_create_without_constraints_action\n )\n self.create_tool_button.addAction(self.edit_command_action)\n self.create_tool_button.setText(self.create_text)", "def add_create_pl_btn(self):\n self.create_pl = QPushButton(\"Add to playlist\")\n self.create_pl.clicked.connect(self.pl_btn_push)\n self.hbtnbox.addWidget(self.create_pl)", "def define_button(self):\n self.separator1 = pygame.Rect(\n 0,\n SCREEN_WIDTH,\n SCREEN_WIDTH,\n BIG_LINE_WIDTH,\n )\n self.separator2 = pygame.Rect(\n 0,\n SCREEN_WIDTH + BIG_LINE_WIDTH // 2,\n SCREEN_WIDTH,\n BIG_LINE_WIDTH,\n )\n\n self.button = pygame.Rect(\n SCREEN_WIDTH // 2 - BUTTON_WIDTH // 2,\n (SCREEN_HEIGHT + SCREEN_WIDTH) // 2 - BUTTON_HEIGHT // 2,\n BUTTON_WIDTH,\n BUTTON_HEIGHT,\n )", "def _view_schedule(self):\n def plus_top_attach(f):\n\n def plus(*args, **kwargs):\n top_attach, left_attach = f(*args, **kwargs)\n return top_attach + 1, left_attach + 1\n\n return plus\n\n @plus_top_attach\n def create_label(text, left_attach, right_attach,\n top_attach, bottom_attach, align=None):\n label = gtk.Label('<span font=\"%s\">%s</span>' %\n (Params().get_default_font(), text))\n label.set_use_markup(True)\n if align == 'left':\n label.set_alignment(xalign=0.0, yalign=0.5)\n elif align == 'right':\n label.set_alignment(xalign=1.0, yalign=0.5)\n self.table.attach(label, left_attach, right_attach,\n top_attach, bottom_attach, xoptions=gtk.FILL, yoptions=False)\n label.show()\n return top_attach, left_attach\n\n @plus_top_attach\n def create_separator(left_attach, right_attach,\n top_attach, bottom_attach):\n separator = gtk.HSeparator()\n self.table.attach(separator, left_attach, right_attach,\n top_attach, bottom_attach, xoptions=gtk.FILL, yoptions=False)\n separator.show()\n return top_attach, left_attach\n\n tattach, tlen, view_sch = 0, 0, Params().get_view_sch()\n for i in view_sch:\n if i:\n tlen += 1\n for day in ['Monday', 'Tuesday', 'Wednesday',\n 'Thursday', 'Friday', 'Saturday']:\n tattach = create_label('<b><span color=\"%s\">%s</span></b>' %\n (Params().get_day_color(), day), 0, tlen,\n tattach, tattach + 1, 'left')[0]\n tattach = create_separator(0, tlen, tattach, tattach + 1)[0]\n\n schedule = Schedule().get_schedule(day,\n Schedule().get_current_week() - 1)\n for i in range(8):\n if not schedule[i][1] == '' and \\\n (schedule[i][0] == Schedule().get_subgroup() or\n schedule[i][0] == 2):\n if not schedule[i][2]:\n label_color = '%s' % str(Params().get_lecture_color())\n elif schedule[i][2] == 1:\n label_color = '%s' % \\\n str(Params().get_laboratory_color())\n 
elif schedule[i][2] == 2:\n label_color = '%s' % str(Params().get_practice_color())\n else:\n label_color = '%s' % str(Params().get_non_color())\n\n label_template = '<span color=\"%s\">%s</span>'\n lattach = 0\n if view_sch[0]:\n lattach = create_label('<span color=\"%s\">%d.</span>' %\n (label_color, i),\n lattach, lattach + 1, tattach, tattach + 1)[1]\n if view_sch[1]:\n lattach = create_label(label_template % (label_color,\n '-'.join(Schedule().get_lessons_time()[i])),\n lattach, lattach + 1, tattach, tattach + 1)[1]\n if view_sch[2]:\n lattach = create_label(label_template %\n (label_color, schedule[i][1]),\n lattach, lattach + 1,\n tattach, tattach + 1, 'left')[1]\n if view_sch[3]:\n lattach = create_label(label_template %\n (label_color, schedule[i][3]),\n lattach, lattach + 1, tattach, tattach + 1)[1]\n if view_sch[4]:\n create_label(label_template %\n (label_color, schedule[i][4]),\n lattach, lattach + 1,\n tattach, tattach + 1, 'right')\n tattach += 1", "def test_add_recurring_schedule(self):\n pass", "def __init__widget(self):\n self.__introduction = QtWidgets.QPushButton('使用“WSAD”对应“上下左右”控制\\n贪吃蛇,点击开始游戏!')\n self.add_layout_widget(self.central_widget, self.__introduction)\n self.__introduction.clicked.connect(self.__run)\n self.__ticker.timeout.connect(self.__snake_move)", "def _create_actions(self):\n self.save_button.clicked.connect(self._save)", "def on_pushButton_precedent_clicked(self):\n \n if self.lineEdit_temperature.text() !=\"\":\n num_pt =int(self.label_pt.text())\n if num_pt - 1 < 1:\n pass\n else:\n \n #effacement\n for ligne in range(11):\n for colonne in range(8):\n if colonne !=6:\n \n self.tableWidget_mesures.setItem(ligne, colonne, QtGui.QTableWidgetItem(None))\n self.lineEdit_temperature.clear()\n self.lineEdit_stab_max.clear()\n self.lineEdit_u_stab_max.clear() \n self.lineEdit_hom_max_2.clear()\n self.lineEdit_u_hom_max.clear()\n \n else:\n pass\n #reafctation des donnees \n self.reaffectation_table_widget_mesures(str(int(self.label_pt.text())-1)) \n \n #presentation textEdit n°pt de la mesure\n self.label_pt.setText(str(num_pt -1))", "def onTrajCalcButtonPress(self, button):\n\t\t# calculate trajectory and save as new Trajectory object\n\t\tself.trajectory = Trajectory(trajectory_planner.planner(waypoints_gui, radioJoint))\n\t\t# show generated trajectory in new window\n\t\tdraw.draw_traj(self.trajectory.waypoints, self.trajectory.trajectory)\n\t\t# enable go button\n\t\tself.button_go_traj.set_sensitive(True)", "def add_button(self):\n if len(self.datamodels) == 0:#In this case the button is deleted completely as there is no reference to it\n self.addDataDisplays = QtGui.QPushButton(self) #Draw (+) button to add data displays\n self.addDataDisplays.setText(\"+\")\n self.addDataDisplays.clicked.connect(self.add_data_display)\n self.verticalLayout.removeWidget(self.addDataDisplays)\n self.verticalLayout.addWidget(self.addDataDisplays)", "def createBtnCmd(self, *args):\r\n self.cone = cmds.polyCone()\r\n cmds.setAttr('%s.rotateX' % self.cone[0], 90)", "def timeSpaceDiagramMethod(self):\n fig, ax1 = plt.subplots()\n\n ax1.set_xlabel('Time (s)', fontsize=24, fontweight='bold')\n ax1.set_ylabel('Distance (m)', fontsize=24, fontweight='bold')\n max_x_limit = self.xAxisRange-100\n plt.xlim([0, max_x_limit])\n plt.ylim([0, max(self.distance_Green)+400])\n plt.xticks(np.arange(0, self.xAxisRange-75, 50), fontsize=24)\n ax1.tick_params(axis='y', labelsize=18)\n for axis in ['top', 'bottom', 'left', 'right']:\n ax1.spines[axis].set_linewidth(4)\n # 
ax1.set_yticks(ticks=np.arange(0, 100, 20),fontsize = 24)\n #newYlabel = ['-400','0','395','810','1225']\n # plt.gca().set_yticklabels(newYlabel)\n # plt.yticks([])\n req_phase_length = len(self.greenRectangleStartPoint)\n for i in range(0, req_phase_length):\n x = self.greenRectangleStartPoint[i]\n y = self.distance_Green[i]\n ax1.add_patch(Rectangle(\n (x, y), self.greenTime[i], 30, angle=0.0, color='green', linewidth=2,))\n\n req_phase_length = len(self.clearanceRectangleStartPoint)\n for i in range(0, req_phase_length):\n x = self.clearanceRectangleStartPoint[i]\n y = self.distance_Clearance[i]\n ax1.add_patch(Rectangle(\n (x, y), self.clearanceTime[i], 30, angle=0.0, color='red', linewidth=2))\n\n\n if len(self.evTrajectoryTimePoint) > 0:\n ax1.scatter(self.evTrajectoryTimePoint, self.evTrajectoryDistancePoint, c=\"black\", linewidths=4,\n marker=\".\", edgecolor=\"none\", s=50, label='Connected Vehicles Trajectory', zorder=2)\n\n if len(self.transitTrajectoryTimePoint) > 0:\n ax1.scatter(self.transitTrajectoryTimePoint, self.transitTrajectoryDistancePoint, c=\"black\",\n linewidths=4, marker=\".\", edgecolor=\"none\", s=50, label='Connected Vehicles Trajectory', zorder=2)\n\n if len(self.truckTrajectoryTimePoint) > 0:\n ax1.scatter(self.truckTrajectoryTimePoint, self.truckTrajectoryDistancePoint, c=\"black\",\n linewidths=4, marker=\".\", edgecolor=\"none\", s=50, label='Connected Vehicles Trajectory', zorder=2)\n\n if len(self.carTrajectoryTimePoint) > 0:\n ax1.scatter(self.carTrajectoryTimePoint, self.carTrajectoryDistancePoint, c=\"black\", linewidths=4,\n marker=\".\", edgecolor=\"none\", s=50, label='Connected Vehicles Trajectory', zorder=2)\n\n if len(self.connectedVehicleTrajectoryTimePoint) > 0:\n ax1.scatter(self.connectedVehicleTrajectoryTimePoint, self.connectedVehicleTrajectoryDistancePoint, c=\"black\", linewidths=4,\n marker=\".\", edgecolor=\"none\", s=50, label='Connected Vehicles Trajectory', zorder=2) \n\n ax1.legend(loc='upper right', prop={\"size\": 16})\n ax1.set_title(\"Time-Space Diagram\", fontsize=20, fontweight='bold')\n fig.tight_layout() # otherwise the right y-label is slightly clipped\n plt.grid(color='black', linestyle='-', linewidth=0.5)\n plt.show()", "def setECVButton(self):\n self.ECVButton = qt.QPushButton(\"Create ECV Map\")\n self.ECVButton.toolTip = \"Create the ECV map with the volumes selected as Native and Enhanced LL\"\n self.ECVButton.enabled = False\n self.ECVcollButton_Layout.addRow(self.ECVButton)", "def validationNeedle(self):\n #productive #onButton\n profprint()\n widget = slicer.modules.NeedleFinderWidget\n widget.validationNeedleNumber += 1\n widget.validationNeedleButton.text= \"New Validation Needle: (\"+str(widget.validationNeedleNumber)+\")->(\"+str(widget.validationNeedleNumber+1)+\")\"\n # self.tableValueCtrPt.append([])\n widget.stepNeedle = 0", "def _schedule(self):\n name=input(\"\\nEnter the patient's name\")\n condition=self._getCondition()\n self._model.add(Patient(name,condition))\n print(name,\"is added to the \",str(condition),\" list\\n\")", "def click_add_financial_charges_button(self):\n self.number_of_financial_charges_before_create = self.get_number_of_financial_charges_in_the_grid()\n self.click_element(self.add_financial_charges_button_locator, hover=True)", "def add_button_clicked(self, obj):\n note = Note()\n if self.notetype :\n note.set_type(self.notetype)\n try:\n from .. 
import EditNote\n EditNote(self.dbstate, self.uistate, self.track, \n note, self.add_callback,\n self.callertitle, extratype = [self.notetype])\n except WindowActiveError:\n pass", "def create_rental(self, student_id:int, rental_instrument:int, start_date:date, months_to_rent:int):\n try:\n s = start_date\n start_date = self.date_to_strf(s)\n # end_date = \"{}-{:02d}-{:02d}\".format(s.year, s.month + months_to_rent, s.day)\n self.cursor.execute(\"\"\" \n INSERT INTO rental (start_date, end_date, student_id, ri_id)\n VALUES (%s, %s::date + INTERVAL '%s month', %s , %s)\n \"\"\", [start_date, start_date, months_to_rent, student_id, rental_instrument])\n self.db.commit()\n except Exception as e:\n self.db.rollback()\n raise RuntimeError(\"No student found to be able to complete rental.\")", "def append(self, spew, line):\n nextRound = []\n for builder in self.eventFactories:\n event = builder.start(spew, line)\n if event:\n nextRound.append(event)\n for event in self.create:\n event.append(spew, line)\n if event.finished:\n self.events.append(event)\n else:\n nextRound.append(event)\n self.create = nextRound", "def add_new_arrival(self):\n pass", "def __init__(self, terminal ,parent=None):\n QObject.__init__(self)\n QWidget.__init__(self, parent)\n\n self.terminal = terminal\n self.terminal.updateadd.connect(self.additemRow)\n\n self.tableWidget = QTableWidget(self)\n self.tableWidget.setColumnCount(4)\n self.tableWidget.setRowCount(0)\n item = QTableWidgetItem()\n self.tableWidget.setHorizontalHeaderItem(0, item)\n item = QTableWidgetItem()\n self.tableWidget.setHorizontalHeaderItem(1, item)\n item = QTableWidgetItem()\n self.tableWidget.setHorizontalHeaderItem(2, item)\n item = QTableWidgetItem()\n self.tableWidget.setHorizontalHeaderItem(3, item)\n itemadd = self.tableWidget.horizontalHeaderItem(0)\n itemadd.setText(\"No HP\")\n itemadd = self.tableWidget.horizontalHeaderItem(1)\n itemadd.setText( \"Tgl\")\n itemadd = self.tableWidget.horizontalHeaderItem(2)\n itemadd.setText(\"Jam\")\n itemadd = self.tableWidget.horizontalHeaderItem(3)\n itemadd.setText(\"Pesan\")\n #creat grid layout\n veticalbox = QVBoxLayout()\n veticalbox.addWidget(self.tableWidget)\n\n # Create main layout\n container = QHBoxLayout()\n container.addLayout(veticalbox)\n self.setLayout(container)\n\n #self.additemRow('+62876767862','17/02/28' , '15:47:58+28' , 'testing testing testing')", "def createButtons(self):\r\n buttonsPosition = {\r\n \"BROWSE\": (3, 2),\r\n \"CALCULATE\": (13, 2),\r\n \"SAVE\": (14, 0),\r\n \"CLEAR\": (14, 1),\r\n \"DELETE\": (14, 2),\r\n \"PRINT\": (13, 3),\r\n \"GENERATE BILL\": (14, 3)\r\n }\r\n self.buttons = {}\r\n for widgetName, position in buttonsPosition.items():\r\n self.button = QPushButton(widgetName)\r\n\r\n self.button.setStyleSheet(\"\"\"\r\n QPushButton {\r\n\r\n background-color: #A8DBC5;\r\n font-family: arial;\r\n font-weight: bold;\r\n font-size: 12px;\r\n border-color: white;\r\n }\r\n QPushButton:hover {\r\n background-color: #DAE0E2;\r\n }\r\n \"\"\")\r\n self.grid.addWidget(self.button, position[0], position[1])\r\n self.buttons[widgetName] = self.button\r\n # Setting calendar icon\r\n self.buttons[\"BROWSE\"].setIcon(QIcon(\"calendarr.png\"))\r\n # Buttons Signals\r\n self.buttons[\"CLEAR\"].clicked.connect(self.clearAll)\r\n self.buttons[\"BROWSE\"].clicked.connect(self.calendar)\r\n self.buttons[\"CALCULATE\"].clicked.connect(self.calculate)\r\n self.buttons[\"PRINT\"].clicked.connect(self.printBill)", "def create_buttons(self, width=0.2):\n b_N, b_length = 
self.num_buttons, width\n b_sep = (1. / (b_N + 1)) * (1 - b_N * b_length)\n for b in range(b_N):\n start = (b + 1) * b_sep + b * b_length\n r = [start, 0.05, b_length, 0.075]\n self.regions.append(r)\n\n # adjust the figure\n plt.subplots_adjust(bottom=0.25)\n # populate buttons\n for b in range(b_N):\n axbutton = plt.axes(self.regions[b])\n button = Button(axbutton, self.button_labels[b])\n button.on_clicked(self.actions[self.button_actions[b]])\n self.buttons.append(button)", "def action_move_create(self, cr, uid, ids, context=None):\n ait_obj = self.pool.get('account.invoice.tax')\n cur_obj = self.pool.get('res.currency')\n period_obj = self.pool.get('account.period')\n payment_term_obj = self.pool.get('account.payment.term')\n journal_obj = self.pool.get('account.journal')\n move_obj = self.pool.get('account.move')\n if context is None:\n context = {}\n for inv in self.browse(cr, uid, ids, context=context):\n if not inv.journal_id:\n raise orm.except_orm(_('Error!'),\n _('Journal not defined for this invoice!'))\n if not inv.journal_id.iva_registry_id:\n raise orm.except_orm(_('Error!'),\n _('You must link %s with a VAT registry!') % (inv.journal_id.name))\n if not inv.journal_id.sequence_id:\n raise orm.except_orm(_('Error!'),\n _('Please define sequence on the journal related to this invoice.')) \n if not inv.invoice_line:\n raise orm.except_orm(_('No Invoice Lines!'),\n _('Please create some invoice lines.'))\n if inv.move_id:\n continue\n\n ctx = context.copy()\n ctx.update({'lang': inv.partner_id.lang})\n if not inv.date_invoice:\n self.write(cr, uid, [inv.id],\n {'date_invoice': fields.date.context_today(self,\n cr,\n uid,\n context=context)},\n context=ctx)\n company_currency = self.pool['res.company'].browse(cr, uid,\n inv.company_id.id).currency_id.id\n # create the analytical lines\n # one move line per invoice line\n # iml = self._get_analytic_lines(cr, uid, inv.id, context=ctx)\n iml = super(account_invoice_makeover, self)._get_analytic_lines(cr, uid, inv.id, context=ctx)\n # check if taxes are all computed\n compute_taxes = ait_obj.compute(cr, uid, inv.id, context=ctx)\n # self.check_tax_lines(cr, uid, inv, compute_taxes, ait_obj)\n super(account_invoice_makeover, self).check_tax_lines(cr, uid, inv, compute_taxes, ait_obj)\n\n # I disabled the check_total feature\n group_check_total_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'account', 'group_supplier_inv_check_total')[1]\n group_check_total = self.pool.get('res.groups').browse(cr, uid,\n group_check_total_id,\n context=context)\n if group_check_total and uid in [x.id for x in group_check_total.users]:\n if (inv.type in ('in_invoice', 'in_refund') and abs(inv.check_total - inv.amount_total) >= (inv.currency_id.rounding / 2.0)):\n raise orm.except_orm(_('Bad Total!'), _('Please verify the price of the invoice!\\nThe encoded total does not match the computed total.'))\n\n if inv.payment_term:\n total_fixed = total_percent = 0\n for line in inv.payment_term.line_ids:\n if line.value == 'fixed':\n total_fixed += line.value_amount\n if line.value == 'procent':\n total_percent += line.value_amount\n total_fixed = (total_fixed * 100) / (inv.amount_total or 1.0)\n if (total_fixed + total_percent) > 100:\n raise orm.except_orm(_('Error!'), _(\"Cannot create the invoice.\\nThe related payment term is probably misconfigured as it gives a computed amount greater than the total invoiced amount. 
In order to avoid rounding issues, the latest line of your payment term must be of type 'balance'.\"))\n\n # one move line per tax line\n iml += ait_obj.move_line_get(cr, uid, inv.id)\n\n# entry_type = ''\n if inv.type in ('in_invoice', 'in_refund'):\n ref = inv.reference\n# entry_type = 'journal_pur_voucher'\n# if inv.type == 'in_refund':\n# entry_type = 'cont_voucher'\n else:\n # ref = self._convert_ref(cr, uid, inv.number)\n ref = super(account_invoice_makeover, self)._convert_ref(cr, uid, inv.number)\n# entry_type = 'journal_sale_vou'\n# if inv.type == 'out_refund':\n# entry_type = 'cont_voucher'\n\n diff_currency_p = inv.currency_id.id <> company_currency\n # create one move line for the total and possibly adjust the other lines amount\n total = 0\n total_currency = 0\n # total, total_currency, iml = self.compute_invoice_totals(cr, uid, inv, company_currency, ref, iml, context=ctx)\n total, total_currency, iml = super(account_invoice_makeover, self).compute_invoice_totals(cr, uid, inv, company_currency, ref, iml, context=ctx)\n acc_id = inv.account_id.id\n\n name = inv['name'] or inv['supplier_invoice_number'] or '/'\n totlines = False\n if inv.payment_term:\n totlines = payment_term_obj.compute(cr,\n uid, inv.payment_term.id, total, inv.date_invoice or False, context=ctx)\n if totlines:\n res_amount_currency = total_currency\n i = 0\n ctx.update({'date': inv.date_invoice})\n for t_line in totlines:\n if inv.currency_id.id != company_currency:\n amount_currency = cur_obj.compute(cr, uid, company_currency, inv.currency_id.id, t_line[1], context=ctx)\n else:\n amount_currency = False\n\n # last line add the diff\n res_amount_currency -= amount_currency or 0\n i += 1\n if i == len(totlines):\n amount_currency += res_amount_currency\n\n iml.append({\n 'type': 'dest',\n 'name': name,\n 'price': t_line[1],\n 'account_id': acc_id,\n 'date_maturity': t_line[0],\n 'amount_currency': diff_currency_p \\\n and amount_currency or False,\n 'currency_id': diff_currency_p \\\n and inv.currency_id.id or False,\n 'ref': ref,\n 'payment_type': t_line[2]\n })\n else:\n iml.append({\n 'type': 'dest',\n 'name': name,\n 'price': total,\n 'account_id': acc_id,\n 'date_maturity': inv.date_due or False,\n 'amount_currency': diff_currency_p \\\n and total_currency or False,\n 'currency_id': diff_currency_p \\\n and inv.currency_id.id or False,\n 'ref': ref,\n 'payment_type': None\n })\n\n date = inv.date_invoice or time.strftime('%Y-%m-%d')\n\n part = self.pool.get(\"res.partner\")._find_accounting_partner(inv.partner_id)\n\n line = map(lambda x:(0, 0, self.line_get_convert(cr, uid, x, part.id, date, context=ctx)), iml)\n\n # line = self.group_lines(cr, uid, iml, line, inv)\n line = super(account_invoice_makeover, self).group_lines(cr, uid, iml, line, inv)\n\n journal_id = inv.journal_id.id\n journal = journal_obj.browse(cr, uid, journal_id, context=ctx)\n if journal.centralisation:\n raise orm.except_orm(_('User Error!'),\n _('You cannot create an invoice on a centralized journal. 
Uncheck the centralized counterpart box in the related journal from the configuration menu.'))\n\n line = self.finalize_invoice_move_lines(cr, uid, inv, line)\n\n move = {\n 'ref': inv.reference and inv.reference or inv.name,\n 'line_id': line,\n 'journal_id': journal_id,\n 'date': date,\n 'narration': inv.comment,\n 'company_id': inv.company_id.id,\n }\n period_id = inv.period_id and inv.period_id.id or False\n ctx.update(company_id=inv.company_id.id,\n account_period_prefer_normal=True)\n if not period_id:\n period_ids = period_obj.find(cr, uid, inv.registration_date, context=ctx)\n period_id = period_ids and period_ids[0] or False\n if period_id:\n move['period_id'] = period_id\n for i in line:\n i[2]['period_id'] = period_id\n\n ctx.update(invoice=inv)\n move_id = move_obj.create(cr, uid, move, context=ctx)\n new_move_name = move_obj.browse(cr, uid, move_id, context=ctx).name\n # make the invoice point to that move\n self.write(cr, uid, [inv.id], {'move_id': move_id, 'period_id':period_id, 'move_name':new_move_name}, context=ctx)\n # Pass invoice in context in method post: used if you want to get the same\n # account move reference when creating the same invoice after a cancelled one:\n move_obj.post(cr, uid, [move_id], context=ctx)\n # self._log_event(cr, uid, ids)\n super(account_invoice_makeover, self)._log_event(cr, uid, ids)\n return True", "def __init__(self):\n\n\t\tself.tasklist = TaskList()\n\t\tself.legend = '\\nLegend: Not Due ' + Fore.CYAN + Style.BRIGHT + 'Upcoming ' + Fore.BLUE + \\\n\t\t Style.BRIGHT + 'Due ' + Fore.RED + Style.BRIGHT + 'Overdue ' + Fore.WHITE + Style.BRIGHT + \\\n\t\t Back.WHITE + 'Completed' + Fore.RESET + Style.NORMAL + Back.RESET", "def btn_create_order_con(self):\n\t\tprint()\n\t\tprint('btn_create_order_con')\n\n\t\t# Init\n\t\t\n\t\t# Search Partner\n\t\tpartner = tre_funcs.get_partner(self, self.patient.name)\n\n\t\t# Search pricelist\n\t\tpricelist = tre_funcs.get_pricelist(self)\n\n\t\t# Search product\n\t\tname = 'CONSULTA MEDICA'\n\t\tprice_list = '2019'\n\t\tproduct = tre_funcs.get_product_product(self, name, price_list)\n\n\t\t# Check \n\t\tproduct_template = tre_funcs.get_product_template(self, name, price_list)\n\t\ttre_funcs.check_product(self, '2019', product, product_template)\n\n\t\t# Create order \n\t\torder = pl_creates.create_order_con(self, partner.id, pricelist.id, product)\n\t\t\n\t\t# Open Order\n\t\treturn action_funcs.open_order(order)", "def add_entry(self, start_day, start_hour, stop_day, stop_hour, mode, radar=[\"ALL\"]):\n self.entries.append(Entry(self.year, self.month, start_day, start_hour, stop_day, stop_hour, mode, radar))", "def increment_time(self, **kwargs):\n \n #Pull all optional keyword arguements\n if 'timerange' in kwargs:\n timerange = kwargs.pop('timerange')\n else:\n timerange = 7\n \n if 'display' in kwargs:\n displayflag = kwargs.pop('display')\n else:\n displayflag = 1\n \n if 'auto' in kwargs:\n autoflag = kwargs.pop('auto')\n else:\n autoflag = 0\n \n if 'triggered' in kwargs:\n triggered_rules = kwargs.pop('triggered')\n else:\n triggered_rules = []\n \n #Run simulation one day at a time until specified end point is reached\n count = range(0,timerange)\n for i in count:\n \n \n #Increment one day if at least one infected person remains. 
If not, end the simulation\n if self.SD_Map.IPop.value() > 1:\n time = self.timeSeries[-1]\n self.timeSeries.append(time+1)\n self.SD_Map.update_all(self.timestep(), len(self.timeSeries)-2)\n else:\n print('Done!')\n \n #Update the time display\n self.timev.set(self.timeSeries[-1])\n \n #Add any triggered rules to the rule log display\n if triggered_rules != []:\n day_text = self.translate('Day')+' ' + str(self.timeSeries[-1]) \n rule_text = '; ' + self.translate('Rules') + ': ' + str(triggered_rules)[1:-1]\n log_text = day_text + rule_text\n self.list_info_boxes['Log'].insert(tk.END, log_text)\n \n #If appropriate, update all of the graphs\n if displayflag == 1:\n if self.arrangment == ['Map', 'Graph']:\n index = 2\n invertflag = 1\n else:\n index = 0\n invertflag = 0\n \n #Select all of the graphs\n canvaslist = []\n for entrylist in self.graph_canvas_list:\n for entry in entrylist:\n canvaslist.append(entry)\n\n #For each graph, delete it and replace it with an updated graph\n for canvas in canvaslist:\n if index < 2:\n col = 0\n inputindex = index\n self.figures[index].clear()\n plt.close(self.figures[index])\n else:\n col = 1\n inputindex = index - 2\n if invertflag:\n self.figures[inputindex].clear()\n plt.close(self.figures[inputindex])\n else:\n self.figures[index].clear()\n plt.close(self.figures[index])\n \n #Make new graph\n framename = canvas.get_tk_widget().master\n canvas.get_tk_widget().destroy()\n graph = self.translate(self.graph_setting_list[col][inputindex].get(),\n input_language=self.language,\n output_language='english')\n canvas,fig = self.make_graph(framename, graph,\n gridpos = inputindex*2+1)\n self.graph_canvas_list[col][inputindex]=canvas\n \n #Update figures list\n if invertflag:\n self.figures[inputindex] = fig\n else:\n self.figures[index] = fig\n index += 1", "def create_invoice(self):\n for line in self:\n # if not line.account_id:\n # raise UserError(_('Please Add the incoming Account !!'))\n self.ensure_one()\n journal_id = self.env['account.journal'].search([\n ('type', '=', 'sale')], limit=1)\n inv_line_main = {\n 'name': line.description.name,\n 'price_unit': line.amount or 0.00,\n 'quantity': 1,\n 'discount': line.discount,\n 'account_id': line.description.property_account_income_id.id or line.description.categ_id.property_account_income_categ_id.id or False,\n }\n inv_values = {\n 'partner_id': line.patient_id.partner_id.id,\n 'patient_id': line.patient_id.id,\n 'dentist': line.dentist.id,\n 'move_type': 'out_invoice',\n 'invoice_date': datetime.now().strftime(DF) or False,\n 'journal_id': journal_id and journal_id.id or False,\n 'teeth_id': line.patient_id and line.patient_id.id or False,\n }\n acc_id = self.env['account.move'].create(inv_values)\n acc_id.write({'invoice_line_ids': [(0, 0, inv_line_main)]})\n\n self.write({'invc_id': acc_id.id, 'inv': True})\n context = dict(self._context or {})\n wiz_form_id = self.env['ir.model.data'].get_object_reference(\n 'account', 'view_move_form')[1]\n\n return {\n 'view_type': 'form',\n 'view_id': wiz_form_id,\n 'view_mode': 'form',\n 'res_model': 'account.move',\n 'res_id': self.invc_id.id,\n 'type': 'ir.actions.act_window',\n 'target': 'current',\n 'context': context,\n }", "def schedule(request):\r\n\r\n return render(request, 'editorial/schedule.html', {})", "def create():\n config = request.data\n return add_scheduling_block(config)", "def openVisualizzazione(self):\n # check that a network has been created or configured\n if(self.__sequenza == True):\n
self.ui.comunicazione.setText('')\n # initialize the stopwatch to measure\n # the execution time\n start = time.time()\n # start training the model\n self.__myNNcreator.trainingModello()\n # stop the stopwatch\n end = time.time()\n # compute the actual elapsed time\n tempo = end-start\n tempo = round(tempo,2)\n # save the path containing the image \n # of the loss and accuracy trends\n pathImg = self.__myNNcreator.salvataggioRisultati()\n # start testing the network \n self.__myNNcreator.testing()\n # save the testing and training results\n test, train = self.__myNNcreator.risultati()\n # create and open the visualization interface\n # to show the results\n self.uiVisualizzazione = Visualizzazione()\n self.uiVisualizzazione.setup(test,train,pathImg,self.__myNNcreator,tempo)\n self.uiVisualizzazione.show()\n\n # make the 'convalida' button clickable again\n self.ui.but_convalida.setEnabled(True)\n self.ui.but_convalida.setText('convalida')\n # make the 'carica' button clickable again\n self.ui.but_caricaRete.setEnabled(True)\n self.ui.but_caricaRete.setText('carica rete')\n # make the 'salva' button clickable again\n self.ui.but_salva.setEnabled(True)\n self.ui.but_salva.setText('salva')\n else:\n self.ui.comunicazione.setText('err: salva/carica una rete')", "def action_create_invoice(self):\n if self.partner_id:\n supplier = self.partner_id\n else:\n supplier = self.partner_id.search(\n [(\"name\", \"=\", \"Salon Default Customer\")])\n lines = []\n product_id = self.env['product.product'].search(\n [(\"name\", \"=\", \"Salon Service\")])\n for records in self.order_line_ids:\n if product_id.property_account_income_id.id:\n income_account = product_id.property_account_income_id.id\n elif product_id.categ_id.property_account_income_categ_id.id:\n income_account = product_id.categ_id.\\\n property_account_income_categ_id.id\n else:\n raise UserError(\n _(\"Please define income account for this product: \"\n \"'%s' (id:%d).\") % (product_id.name, product_id.id))\n value = (0, 0, {\n 'name': records.service_id.name,\n 'account_id': income_account,\n 'price_unit': records.price,\n 'quantity': 1,\n 'product_id': product_id.id,\n })\n lines.append(value)\n invoice_line = {\n 'move_type': 'out_invoice',\n 'partner_id': supplier.id,\n 'invoice_user_id': self.env.user.id,\n 'invoice_origin': self.name,\n 'invoice_line_ids': lines,\n }\n inv = self.env['account.move'].create(invoice_line)\n action = self.env.ref('account.action_move_out_invoice_type',\n raise_if_not_found=False)\n result = {\n 'name': action.name,\n 'type': 'ir.actions.act_window',\n 'views': [[False, 'form']],\n 'target': 'current',\n 'res_id': inv.id,\n 'res_model': 'account.move',\n }\n self.inv_stage_identifier = True\n self.stage_id = 3\n invoiced_records = self.env['salon.order'].search(\n [('stage_id', 'in', [3, 4]), ('chair_id', '=', self.chair_id.id)])\n total = 0\n for rows in invoiced_records:\n invoiced_date = str(rows.date)\n invoiced_date = invoiced_date[0:10]\n if invoiced_date == str(date.today()):\n total = total + rows.price_subtotal\n self.chair_id.collection_today = total\n self.update_number_of_orders()\n return result", "def goto_create(self):\n\n self.create.click()", "def startSchedule(self):\n DPxStartDinSched()", "def on_btnCalendarResIn_clicked(self,widget):\n try:\n variables.semaforo = 2\n variables.vencalendar.connect('delete-event', lambda w, e: w.hide() or True)\n variables.vencalendar.show()\n except:\n print('error abrir 
calendario')", "def storefront_generate():\n\n\tfrom pyrevit import script\n\n\ttol = 0.001\n\n\tversion = __revit__.Application.VersionNumber.ToString()\n\tuidoc = __revit__.ActiveUIDocument\n\tdoc = uidoc.Document\n\tcurrentView = uidoc.ActiveView\n\n\tstorefrontFull = []\n\tstorefrontPartial = []\n\tselectedLevels = []\n\tstorefrontFullLines = []\n\tstorefrontPartialLines = []\n\tinteriorWallsLines = []\n\tinteriorWallsLinesEdges = []\n\tselectedDoors = []\n\tselectedRooms = []\n\tselectedFloors = []\n\n\tecModelInst = None\n\tdocEC = None\n\tecTransform = None\n\n\tallWallsEC = []\n\tallLevelsEC = []\n\tallColumnsEC = []\n\twallsLinesEdgesEC = []\n\tselectedLevelsEC = []\n\tselectedWallsEC = []\n\tselectedColumnsEC = []\n\twallsLinesEdgesEC = []\n\n\n\tdistTol = 0.5 \n\tangleTol = 0.01\n\tabsoluteTol = 0.001\n\n\tminPanelWidth = 1.0\n\n\tdocLoaded = RevitLoadECDocument()\n\tdocEC = docLoaded[0]\n\tecTransform = docLoaded[1]\n\n\tmrTimer = Timer()\n\n\n\t######################################################################\n\t# Collects all elements in a view #\n\t######################################################################\n\n\tselectedLevel = __revit__.ActiveUIDocument.ActiveView.GenLevel.Id\n\tselectedLevelInst = doc.GetElement(selectedLevel)\n\n\tcurrentSelected = list(uidoc.Selection.GetElementIds())\n\tselectedStorefront = []\n\n\tallWalls = GetAllElements(doc, BuiltInCategory.OST_Walls, Autodesk.Revit.DB.Wall, currentView=True)\n\tallColumns = GetAllElements(doc, BuiltInCategory.OST_Columns, Autodesk.Revit.DB.FamilyInstance, currentView=True)\n\tallColumns += GetAllElements(doc, BuiltInCategory.OST_StructuralColumns, Autodesk.Revit.DB.FamilyInstance, currentView=True)\n\n\tinteriorWalls = FilterElementsByName(doc, allWalls,[\"Storefront\",\"Storefront\"], True)\n\n\tif currentSelected:\n\t\tfor id in currentSelected:\n\t\t\tinst = doc.GetElement(id)\n\t\t\tif inst.Category.Name == \"Walls\":\n\t\t\t\tinstName = None\n\t\t\t\ttry:\n\t\t\t\t\tinstName = inst.Name.lower()\n\t\t\t\texcept:\n\t\t\t\t\tfor p in inst.Parameters:\n\t\t\t\t\t\tif p.Definition.Name == \"Name\":\n\t\t\t\t\t\t\tinstName = p.AsString().lower()\n\t\t\t\tif \"storefront\" in instName:\n\t\t\t\t\tif \"full\" in instName:\n\t\t\t\t\t\tstorefrontFull.append(id)\n\t\t\t\t\telif \"partial\" in instName:\n\t\t\t\t\t\tstorefrontPartial.append(id)\n\telse:\n\n\t\tstorefrontFull = FilterElementsByName(doc, allWalls,[\"Storefront\",\"Full\"], False)\n\t\tstorefrontPartial = FilterElementsByName(doc, allWalls,[\"Storefront\",\"Partial\"], False)\n\t\n\t#Collect existing storefront curtain walls and check their Marks to ensure they increment. 
\n\tstartingAssembyId = 0\n\tstorefrontWallsInView = rpw.db.Collector(of_class='Wall', \n\t\t\t\t\t\t\t\t\t\t\tview=currentView, \n\t\t\t\t\t\t\t\t\t\t\twhere=lambda x: str(x.WallType.Kind) == \"Curtain\")\n\ttempList = []\n\tfor storefrontInView in storefrontWallsInView:\n\t\tmark = storefrontInView.get_Parameter(BuiltInParameter.ALL_MODEL_MARK).AsString()\n\t\tif mark:\n\t\t\ttempList.append(int(mark[mark.index(\"-\")+1:]))\n\tif tempList:\n\t\tsortedList = sorted(tempList)\n\t\tstartingAssembyId = sortedList[-1]\n\n\n\ttempList = []\n\t#Makes sure no stacked walls are included.\n\tfor wallId in interiorWalls:\n\t\twall = doc.GetElement(wallId)\n\t\tif not wall.IsStackedWallMember:\n\t\t\ttempList.append(wallId)\n\tinteriorWalls = tempList\n\n\n\t#Sort lists by level\n\tstorefrontFull = FilterElementsByLevel(doc, storefrontFull, selectedLevel)\n\tstorefrontPartial = FilterElementsByLevel(doc, storefrontPartial, selectedLevel)\n\tinteriorWalls = FilterElementsByLevel(doc, interiorWalls, selectedLevel)\n\tselectedColumns = FilterElementsByLevel(doc, allColumns, selectedLevel)\n\n\tif docEC:\n\t\tlevelElevationEC = None \n\t\tfor p in selectedLevelInst.Parameters:\n\t\t\tif p.Definition.Name == \"Elevation\":\n\t\t\t\tlevelElevationEC = p.AsDouble()\n\t\tselectedWallsEC = FilterElementsByLevel(docEC, allWallsEC, levelElevationEC)\n\t\tselectedColumnsEC = FilterElementsByLevel(docEC, allColumnsEC, levelElevationEC)\n\t\twallsLinesEdgesEC = GetWallEdgeCurves(docEC, selectedWallsEC, ecTransform)\n\t\tcolumnsLinesEdgesEC = GetColumnEdgeCurves(docEC, selectedColumnsEC, ecTransform)\n\n\tinteriorWallsLinesEdges = GetWallEdgeCurves(doc, interiorWalls, None)\n\tcolumnsLinesEdges = GetColumnEdgeCurves(doc, selectedColumns)\n\n\n\tlevelElevation = selectedLevelInst.Elevation\n\n\t#############################################\n\t # Prep #\n\t############################################# \n\n\t# Load configuration object\n\tstorefrontConfig = storefront_options()\n\tstorefrontConfig.storefront_set_config()\n\n\tsystemName = storefrontConfig.currentConfig[\"currentSystem\"]\n\n\tstorefrontPaneWidth = storefrontConfig.currentConfig[\"storefrontPaneWidth\"]\n\tstorefrontSpacingType = storefrontConfig.currentConfig[\"spacingType\"]\n\n\tmullionDict = GetMullionTypeDict()\n\tpanelTypeDict = GetWindowTypeDict()\n\t#doorDict = storefrontConfig.doorDict\n\tdoorDict = storefrontConfig.currentConfig[\"systemDoors\"]\n\twallTypeDict = GetWallTypeDict()\n\twallDoorHostDict = GetDoorDictByWallHost()\n\n\t#Ensure walltypes are loaded\n\tif not \"I-Storefront-\"+ systemName in wallTypeDict.keys():\n\t\tAutodesk.Revit.UI.TaskDialog.Show (\"ERROR\", \"Make sure you selected/loaded the correct partition system. 
Check your wall types.\")\n\t\tsys.exit()\n\n\t\n\t#TODO: verify mullions in project, if not then run the load tool.\n\n\t#Profile widths\n\tsystemPostWidth = doc.GetElement(mullionDict[systemName+\"_Post\"]).get_Parameter(BuiltInParameter.CUST_MULLION_THICK).AsDouble()\n\n\tsystemDoorFrame = doc.GetElement(mullionDict[systemName+\"_DoorFrame\"])\n\tsystemDoorFrameWidth = systemDoorFrame.get_Parameter(BuiltInParameter.CUST_MULLION_WIDTH2).AsDouble()\n\tsystemDoorFrameWidth += systemDoorFrame.get_Parameter(BuiltInParameter.CUST_MULLION_WIDTH1).AsDouble()\n\n\tsystemOneBy = doc.GetElement(mullionDict[systemName+\"_OneBy\"])\n\tsystemOneByWidth = systemOneBy.get_Parameter(BuiltInParameter.CUST_MULLION_WIDTH2).AsDouble()\n\tsystemOneByWidth += systemOneBy.get_Parameter(BuiltInParameter.CUST_MULLION_WIDTH1).AsDouble()\n\n\n\twallTypeCW = wallTypeDict[\"I-Storefront-\"+systemName]\n\n\tprogressIndex = 0.0\n\n\t#############################################\n\t # Wrap & Chain #\n\t#############################################\n\t\"\"\"\n\tTakes walls that are inline and makes them a single\n\twall element so that you dont get segmented walls that\n\tare supposed to be a single continuous elevation.\n\t\"\"\"\n\tassemblyId = startingAssembyId\n\tstorefrontElevations = []\n\tstorefrontFullAndPartial = []\n\n\tfor wallId in storefrontFull:\n\t\tstorefrontFullAndPartial.append([wallId,\"Full\"])\n\tfor wallId in storefrontPartial:\n\t\tstorefrontFullAndPartial.append([wallId,\"Partial\"])\n\n\t#--------------Make SF Objects---------------#\n\tfor item1 in storefrontFullAndPartial:\n\n\t\twallId1 = item1[0]\n\t\twallStorefrontType1 = item1[1]\n\n\t\twall1 = doc.GetElement(wallId1)\n\t\twall1LocationCurve = wall1.Location.Curve\n\t\t\n\t\twallDoors = []\n\t\twallHostIds = [wallId1]\n\n\t\tif wallId1 in wallDoorHostDict.keys():\n\t\t\twallDoors = wallDoorHostDict[wallId1]\n\t\t\n\n\t\t#--------------Chain Searching--------------#\n\n\t\t#Find neighbors and chain them if they are in-line.\n\t\tsearchingForChain = True\n\t\twhile searchingForChain:\n\n\t\t\tfoundNeighbor = False\n\t\t\twall1Start = wall1LocationCurve.GetEndPoint(0)\n\t\t\twall1End = wall1LocationCurve.GetEndPoint(1)\n\t\t\twall1Endpoints = [wall1Start, wall1End]\n\n\t\t\tfor item2 in storefrontFullAndPartial:\n\n\t\t\t\twallId2 = item2[0]\n\t\t\t\twallStorefrontType2 = item2[1]\n\n\t\t\t\tif wallId1 != wallId2 and wallStorefrontType1 == wallStorefrontType2:\n\t\t\t\t\twall2 = doc.GetElement(wallId2)\n\t\t\t\t\twall2LocationCurve = wall2.Location.Curve\n\t\t\t\t\twall2Start = wall2LocationCurve.GetEndPoint(0)\n\t\t\t\t\twall2End = wall2LocationCurve.GetEndPoint(1)\n\t\t\t\t\twall2Endpoints = [wall2Start, wall2End]\n\t\t\t\t\tfor i in range(len(wall1Endpoints)):\n\t\t\t\t\t\tpoint1a = wall1Endpoints[i]\n\t\t\t\t\t\tpoint1b = wall1Endpoints[i-1]\n\t\t\t\t\t\tfor j in range(len(wall2Endpoints)):\n\t\t\t\t\t\t\tpoint2a = wall2Endpoints[j]\n\t\t\t\t\t\t\tpoint2b = wall2Endpoints[j-1]\n\t\t\t\t\t\t\tdist = point1a.DistanceTo(point2a)\n\t\t\t\t\t\t\tif dist < absoluteTol:\n\t\t\t\t\t\t\t\tangle = AngleThreePoints(point1b, point1a, point2b)\n\t\t\t\t\t\t\t\t#print angle\n\t\t\t\t\t\t\t\tif abs(angle-180) < absoluteTol:\n\t\t\t\t\t\t\t\t\twallHostIds += [wallId2]\n\t\t\t\t\t\t\t\t\tstorefrontFullAndPartial.remove(item2)\n\t\t\t\t\t\t\t\t\tif wallId2 in wallDoorHostDict.keys():\n\t\t\t\t\t\t\t\t\t\twallDoors += wallDoorHostDict[wallId2]\n\t\t\t\t\t\t\t\t\twall1LocationCurve = Line.CreateBound(point1b, point2b)\n\t\t\t\t\t\t\t\t\tfoundNeighbor = 
True\n\t\t\t\t\t\t\t\t\tbreak\n\t\t\t\tif foundNeighbor:\n\t\t\t\t\tbreak\n\t\t\tif not foundNeighbor:\n\t\t\t\tsearchingForChain = False\n\n\t\t#--------------Create SF Object--------------#\n\t\tassemblyId += 1\n\n\t\tif wallStorefrontType1 == \"Full\":\n\t\t\tsillH = storefrontConfig.currentConfig[\"fullSillHeight\"]\n\t\telif wallStorefrontType1 == \"Partial\":\n\t\t\tif storefrontConfig.currentConfig[\"hasLowerInfill\"]:\n\t\t\t\tsillH = storefrontConfig.currentConfig[\"fullSillHeight\"]\n\t\t\telse:\n\t\t\t\tsillH = storefrontConfig.currentConfig[\"partialSillHeight\"]\n\n\n\t\theadH = storefrontConfig.currentConfig[\"headHeight\"]\n\t\tsfe = StorefrontElevation(wallHostIds, wall1LocationCurve, wallStorefrontType1, assemblyId, sillH, headH, systemName)\n\t\t#Doors\n\t\tif wallDoors:\n\t\t\tsfe.Doors = wallDoors\n\t\tstorefrontElevations.append(sfe)\n\n\n\t#############################################\n\t # Build #\n\t#############################################\n\n\tprint \"RUNNING...DO NOT CLOSE WINDOW...\"\n\n\twith rpw.db.TransactionGroup(\"Convert Wall\", assimilate=True) as tg:\n\n\t\t#Adjust any parameters to the walltype before creation if needed.\n\t\twith rpw.db.Transaction(\"Adjust CW Parameters\") as tx:\n\t\t\tSupressErrorsAndWarnings(tx)\n\n\t\t\t\n\t\t\twtCW = doc.GetElement(wallTypeCW)\n\t\t\tif storefrontConfig.currentConfig[\"deflectionHeadType\"] == 2:\n\t\t\t\twtCW.get_Parameter(BuiltInParameter.AUTO_MULLION_BORDER2_HORIZ).Set(mullionDict[systemName+\"_DeflectionHead-2\"])\n\t\t\telif storefrontConfig.currentConfig[\"deflectionHeadType\"] == 1:\n\t\t\t\twtCW.get_Parameter(BuiltInParameter.AUTO_MULLION_BORDER2_HORIZ).Set(mullionDict[systemName+\"_DeflectionHead-1\"])\n\n\t\tfor storefrontObject in storefrontElevations: \n\n\n\t\t\t#pyrevit progress bar\n\t\t\tprogressIndex += 1\n\t\t\toutput = script.get_output()\n\n\t\t\toutput.update_progress(progressIndex, len(storefrontElevations))\n\n\t\t\thostElement = doc.GetElement(storefrontObject.HostElementIds[0])\n\t\t\tstorefrontType = storefrontObject.SuperType\n\n\t\t\tbaseConstraint = hostElement.get_Parameter(BuiltInParameter.WALL_BASE_CONSTRAINT).AsElementId()\n\n\t\t\tlocLine = storefrontObject.HostLine\n\t\t\tlocLineStart = locLine.GetEndPoint(0)\n\t\t\tlocLineEnd = locLine.GetEndPoint(1)\n\n\t\t\tgridIntersectionPostPoints = []\n\n\t\t\twallHostId = storefrontObject.HostElementIds[0]\n\t\t\twtName = doc.GetElement(wallHostId).Name\n\n\t\t\tnewWall = None\n\n\t\t\tif str(hostElement.WallType.Kind) == \"Basic\": \n\n\t\t\t\t#############################################\n\t\t\t\t# Checks #\n\t\t\t\t#############################################\n\n\t\t\t #------------Interior Walls Edges------------#\n\n\t\t\t\tlocLine = storefrontObject.HostLine\n\t\t\t\tlocLineStart = locLine.GetEndPoint(0)\n\t\t\t\tlocLineEnd = locLine.GetEndPoint(1)\n\n\t\t\t\tfor intWallLine in interiorWallsLinesEdges:\n\t\t\t\t\tintersection = RevitCurveCurveIntersection(locLine,intWallLine)\n\n\t\t\t\t\tif intersection:\n\t\t\t\t\t\tdistToEnd = intersection.DistanceTo(locLineEnd) \n\t\t\t\t\t\tdistToStart = intersection.DistanceTo(locLineStart) \n\n\t\t\t\t\t\t#If intersection is at the ends\n\t\t\t\t\t\tif distToEnd < distTol:\n\t\t\t\t\t\t\tstorefrontObject.EndCondition = \"OnGyp\"\n\t\t\t\t\t\t\t# If intersection is not at the surface of the edges of interior walls\n\t\t\t\t\t\t\tif distToEnd > absoluteTol:\n\t\t\t\t\t\t\t\tstorefrontObject.Line = Line.CreateBound(locLineStart, intersection)\n\n\t\t\t\t\t\telif distToStart < 
distTol:\n\t\t\t\t\t\t\tstorefrontObject.StartCondition = \"OnGyp\"\n\t\t\t\t\t\t\tif distToStart > absoluteTol:\n\t\t\t\t\t\t\t\tstorefrontObject.Line = Line.CreateBound(intersection, locLineEnd)\n\n\t\t\t\t#----------Interior Walls Midspans-----------#\n\t\t\t\tfor intWallId in interiorWalls:\n\t\t\t\t\tintWall = doc.GetElement(intWallId)\n\t\t\t\t\tintWallLine = intWall.Location.Curve\n\t\t\t\t\tintersection = RevitCurveCurveIntersection(locLine,intWallLine)\n\t\t\t\t\tif intersection:\n\t\t\t\t\t\tdistToEnd = intersection.DistanceTo(locLineEnd) \n\t\t\t\t\t\tdistToStart = intersection.DistanceTo(locLineStart) \n\t\t\t\t\t\t#If intersection is at the ends\n\t\t\t\t\t\tif distToEnd > distTol and distToStart > distTol:\n\t\t\t\t\t\t\tgridIntersectionPostPoints.append(intersection)\n\n\n\n\n\t\t\t\t#------------------EC Walls------------------#\n\n\t\t\t\tlocLine = storefrontObject.HostLine\n\t\t\t\tlocLineStart = locLine.GetEndPoint(0)\n\t\t\t\tlocLineEnd = locLine.GetEndPoint(1)\n\t\t\t\tobstructionEdges = columnsLinesEdges\n\t\t\t\tif docEC:\n\t\t\t\t\tobstructionEdges += columnsLinesEdgesEC\n\t\t\t\t\tobstructionEdges += wallsLinesEdgesEC\n\t\t\t\tif obstructionEdges:\n\t\t\t\t\tfor obstructionLine in obstructionEdges:\n\t\t\t\t\t\tobstLineElevation = obstructionLine.GetEndPoint(0).Z\n\t\t\t\t\t\tlocLineStart = XYZ(locLineStart.X, locLineStart.Y, obstLineElevation)\n\t\t\t\t\t\tlocLineEnd = XYZ(locLineEnd.X, locLineEnd.Y, obstLineElevation)\n\t\t\t\t\t\tlocLineFlat = Line.CreateBound(locLineStart, locLineEnd)\n\t\t\t\t\t\tintersection = RevitCurveCurveIntersection(locLineFlat,obstructionLine)\n\t\t\t\t\t\tif intersection:\n\t\t\t\t\t\t\t#ERROR: Hit Existing Condition\n\t\t\t\t\t\t\tif intersection.DistanceTo(locLineEnd) < distTol:\n\t\t\t\t\t\t\t\tstorefrontObject.EndCondition = \"OnObstruction\"\n\t\t\t\t\t\t\telif intersection.DistanceTo(locLineStart) < distTol:\n\t\t\t\t\t\t\t\tstorefrontObject.StartCondition = \"OnObstruction\"\n\n\t\t\t\t\n\t\t\t\t####-------Storefront Intersections-------####\n\n\t\t\t\tlocLine = storefrontObject.HostLine\n\t\t\t\tlocLineStart = locLine.GetEndPoint(0)\n\t\t\t\tlocLineEnd = locLine.GetEndPoint(1)\n\n\n\t\t\t\t#---------------Find Neighbors---------------#\n\t\t\t\t#print storefrontObject.HostElementIds \n\t\t\t\tfor neighbor in storefrontElevations:\n\n\t\t\t\t\tif neighbor != storefrontObject:\n\t\t\t\t\t\tneighborLocLine = neighbor.HostLine\n\t\t\t\t\t\tneighborLocLineStart = neighborLocLine.GetEndPoint(0)\n\t\t\t\t\t\tneighborLocLineEnd = neighborLocLine.GetEndPoint(1)\n\t\t\t\t\t\tintersection = RevitCurveCurveIntersection(locLine,neighborLocLine)\n\t\t\t\t\t\t\n\t\t\t\t\t\tif intersection:\n\t\t\t\t\t\t\tpoint1 = None\n\t\t\t\t\t\t\tintersectionTypeOnNeighbor = None\n\n\t\t\t\t\t\t\t#Check where the intersection is occuring on the neighbor\n\t\t\t\t\t\t\tif intersection.DistanceTo(neighborLocLineStart) < distTol:\n\t\t\t\t\t\t\t\tintersectionTypeOnNeighbor = \"Start\"\n\t\t\t\t\t\t\t\tpoint1 = neighborLocLineEnd\n\t\t\t\t\t\t\telif intersection.DistanceTo(neighborLocLineEnd) < distTol:\n\t\t\t\t\t\t\t\tintersectionTypeOnNeighbor = \"End\"\n\t\t\t\t\t\t\t\tpoint1 = neighborLocLineStart\n\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\tintersectionTypeOnNeighbor = \"Middle\"\n\t\t\t\t\t\t\t\tpoint1 = neighborLocLineEnd\n\n\t\t\t\t\t\t\t#Check if intersection is at the start point or end point or middle\n\t\t\t\t\t\t\tif intersection.DistanceTo(locLineStart) < tol:\n\t\t\t\t\t\t\t\tangle = AngleThreePoints(locLineEnd, intersection, 
point1)\n\t\t\t\t\t\t\t\tstorefrontObject.StartNeighbors.append([neighbor.AssemblyID, neighbor.SuperType, angle, intersectionTypeOnNeighbor, intersection])\n\n\t\t\t\t\t\t\telif intersection.DistanceTo(locLineEnd) < tol:\n\t\t\t\t\t\t\t\tangle = AngleThreePoints(locLineStart, intersection, point1)\n\t\t\t\t\t\t\t\tstorefrontObject.EndNeighbors.append([neighbor.AssemblyID, neighbor.SuperType, angle, intersectionTypeOnNeighbor, intersection])\n\n\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\t#Intersection isn't occurring at the ends.\n\t\t\t\t\t\t\t\tgridIntersectionPostPoints.append(intersection)\n\n\t\t\t\t\t\t\t\t#If the intersections for both lines are on the middles for each other.\n\t\t\t\t\t\t\t\tif intersectionTypeOnNeighbor == \"Middle\":\n\n\t\t\t\t\t\t\t\t\t#Split the intersecting neighbor into two segments so the walls don't overlap\n\t\t\t\t\t\t\t\t\tneighborLocLineStart = neighborLocLine.GetEndPoint(0)\n\t\t\t\t\t\t\t\t\tneighborLocLineEnd = neighborLocLine.GetEndPoint(1)\n\t\t\t\t\t\t\t\t\tneighbor.Line = Line.CreateBound(intersection, neighborLocLineStart)\n\t\t\t\t\t\t\t\t\tneighbor.HostLine = Line.CreateBound(intersection, neighborLocLineStart)\n\n\t\t\t\t\t\t\t\t\t#Create another neighbor that's split\n\t\t\t\t\t\t\t\t\tnewNeighborIndex = len(storefrontElevations)+1\n\t\t\t\t\t\t\t\t\tnewNeighborHostElementIds = neighbor.HostElementIds\n\t\t\t\t\t\t\t\t\tnewNeighborSillHeight = neighbor.SillHeight\n\t\t\t\t\t\t\t\t\tnewNeighborHeadHeight = neighbor.HeadHeight\n\t\t\t\t\t\t\t\t\tsplitNeighborLine = Line.CreateBound(intersection, neighborLocLineEnd)\n\t\t\t\t\t\t\t\t\tsplitNeighbor = StorefrontElevation(newNeighborHostElementIds, splitNeighborLine, neighbor.SuperType, newNeighborIndex, newNeighborSillHeight, newNeighborHeadHeight, systemName)\n\t\t\t\t\t\t\t\t\tstorefrontElevations.append(splitNeighbor)\n\n\t\t\t\t\t\t\t\t\t#Make sure that each new segment has the correct doors on each one\n\t\t\t\t\t\t\t\t\tif neighbor.Doors:\n\t\t\t\t\t\t\t\t\t\tdoorsOnNeighbor = neighbor.Doors\n\t\t\t\t\t\t\t\t\t\ttempList1 = []\n\t\t\t\t\t\t\t\t\t\ttempList2 = []\n\t\t\t\t\t\t\t\t\t\tfor neighborDoorId in doorsOnNeighbor:\n\t\t\t\t\t\t\t\t\t\t\tneighborDoor = doc.GetElement(neighborDoorId)\n\t\t\t\t\t\t\t\t\t\t\tdoorPoint = neighborDoor.Location.Point\n\t\t\t\t\t\t\t\t\t\t\tif RevitPointOnLine2D(doorPoint, neighbor.Line):\n\t\t\t\t\t\t\t\t\t\t\t\ttempList1.append(neighborDoorId)\n\t\t\t\t\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\t\t\t\t\ttempList2.append(neighborDoorId)\n\t\t\t\t\t\t\t\t\t\tneighbor.Doors = tempList1\n\t\t\t\t\t\t\t\t\t\tsplitNeighbor.Doors = tempList2\n\t\t\t\t\n\t\t\t\t#-----------Determine Conditions-----------#\n\n\t\t\t\t###------------Start Condition-----------###\n\t\t\t\tlocLine = storefrontObject.HostLine\n\t\t\t\tlocLineStart = locLine.GetEndPoint(0)\n\t\t\t\tlocLineEnd = locLine.GetEndPoint(1)\n\n\t\t\t\tstartAndEndNeighbors = [storefrontObject.StartNeighbors, storefrontObject.EndNeighbors]\n\n\t\t\t\tfor i in range(len(startAndEndNeighbors)):\n\n\t\t\t\t\tneighborSet = startAndEndNeighbors[i]\n\t\t\t\t\tcornerCount = 0\n\t\t\t\t\tinlineCount = 0\n\t\t\t\t\tcornerTypes = []\n\t\t\t\t\tinlineTypes = []\n\t\t\t\t\tconditionAngleOffset = None\n\t\t\t\t\tconditionToSet = None\n\n\t\t\t\t\tif neighborSet:\n\n\t\t\t\t\t\tfor neighbor in neighborSet:\n\t\t\t\t\t\t\tangle = neighbor[2]\n\t\t\t\t\t\t\tintersectionType = neighbor[3]\n\t\t\t\t\t\t\tintersection = neighbor[4]\n\n\t\t\t\t\t\t\t#---Corner Test---#\n\t\t\t\t\t\t\tif abs(angle-90) < angleTol:\n\t\t\t\t\t\t\t\tif neighbor[1] != 
storefrontType:\n\t\t\t\t\t\t\t\t\tif intersectionType == \"Middle\":\n\t\t\t\t\t\t\t\t\t\tconditionToSet = \"OnStorefront\"\n\t\t\t\t\t\t\t\t\t\tcornerTypes.append(\"Different\")\n\t\t\t\t\t\t\t\t\t\tcornerCount += 2\n\t\t\t\t\t\t\t\t\telif intersectionType == \"Start\" or intersectionType == \"End\":\n\t\t\t\t\t\t\t\t\t\tcornerTypes.append(\"Different\")\n\t\t\t\t\t\t\t\t\t\tcornerCount += 1\n\n\t\t\t\t\t\t\t\telif neighbor[1] == storefrontType:\n\t\t\t\t\t\t\t\t\t# If the storefront is connected to the middle of another storefront\n\t\t\t\t\t\t\t\t\t# that is of the same type, then it should join\n\t\t\t\t\t\t\t\t\tif intersectionType == \"Middle\":\n\t\t\t\t\t\t\t\t\t\tconditionToSet = \"JoinStorefront\"\n\t\t\t\t\t\t\t\t\t\tcornerTypes.append(\"Same\")\n\t\t\t\t\t\t\t\t\t\tcornerCount += 2\n\n\t\t\t\t\t\t\t\t\telif intersectionType == \"Start\" or intersectionType == \"End\":\n\t\t\t\t\t\t\t\t\t\tcornerTypes.append(\"Same\")\n\t\t\t\t\t\t\t\t\t\tcornerCount += 1\n\n\t\t\t\t\t\t\t#---Inline Test---#\n\t\t\t\t\t\t\telif abs(angle-180) < angleTol:\n\t\t\t\t\t\t\t\tif neighbor[1] != storefrontType:\n\t\t\t\t\t\t\t\t\tinlineTypes.append(\"Different\")\n\t\t\t\t\t\t\t\t\tinlineCount += 1 \n\t\t\t\t\t\t\t\telif neighbor[1] == storefrontType:\n\t\t\t\t\t\t\t\t\tinlineTypes.append(\"Same\")\n\t\t\t\t\t\t\t\t\t#Placeholder just in case\n\t\t\t\t\t\t\t\t\tpass\n\n\t\t\t\t\t\t\t#---Angled Test---#\n\t\t\t\t\t\t\telif abs(round(neighbor[2],1) % 90) > angleTol:\n\t\t\t\t\t\t\t\treverse = 0\n\t\t\t\t\t\t\t\tif locLineStart.X > locLineEnd.X: \n\t\t\t\t\t\t\t\t\treverse = 180\n\t\t\t\t\t\t\t\tangleRadians = (neighbor[2] * (2 * math.pi)) / 360\n\t\t\t\t\t\t\t\tconditionAngleOffset = (0.5 * systemPostWidth) / math.tan((angleRadians) * 0.5)\n\t\t\t\t\t\t\t\tconditionToSet = \"Angled\"\n\t\t\t\t\t\t\t\tif storefrontConfig.currentConfig[\"isFramed\"]:\n\t\t\t\t\t\t\t\t\tif i == 0:\n\t\t\t\t\t\t\t\t\t\tvect = RevitTransVector(locLineEnd, locLineStart, magnitude=conditionAngleOffset)\n\t\t\t\t\t\t\t\t\t\tlocLineStart = locLineStart.Add(vect)\n\t\t\t\t\t\t\t\t\t\tstorefrontObject.Line = Line.CreateBound(locLineStart, storefrontObject.Line.GetEndPoint(1))\n\n\t\t\t\t\t\t\t\t\telif i == 1:\n\t\t\t\t\t\t\t\t\t\tvect = RevitTransVector(locLineStart, locLineEnd, magnitude=conditionAngleOffset)\n\t\t\t\t\t\t\t\t\t\tlocLineEnd = locLineEnd.Add(vect)\n\t\t\t\t\t\t\t\t\t\tstorefrontObject.Line = Line.CreateBound(storefrontObject.Line.GetEndPoint(0), locLineEnd)\n\t\t\t\t\t\t\t\tbreak\n\n\t\t\t\t\t\t#---Compound Conditions---#\n\t\t\t\t\t\tif cornerCount == 0 and inlineCount == 1:\n\t\t\t\t\t\t\tif \"Same\" in inlineTypes:\n\t\t\t\t\t\t\t\tpass\n\t\t\t\t\t\t\telif \"Different\" in inlineTypes:\n\t\t\t\t\t\t\t\tif storefrontType == \"Full\":\n\t\t\t\t\t\t\t\t\tconditionToSet = \"ForcePost\"\n\t\t\t\t\t\t\t\telif storefrontType == \"Partial\":\n\t\t\t\t\t\t\t\t\tconditionToSet = \"OnStorefront\"\n\n\t\t\t\t\t\telif cornerCount == 1 and inlineCount == 0:\n\t\t\t\t\t\t\tif \"Same\" in cornerTypes:\n\t\t\t\t\t\t\t\tconditionToSet = None\n\t\t\t\t\t\t\telif \"Different\" in cornerTypes:\n\t\t\t\t\t\t\t\tif storefrontType == \"Full\":\n\t\t\t\t\t\t\t\t\tconditionToSet = None\n\t\t\t\t\t\t\t\telif storefrontType == \"Partial\":\n\t\t\t\t\t\t\t\t\tconditionToSet = \"OnStorefront\"\n\t\t\t\t\t\t\telse: \n\t\t\t\t\t\t\t\tpass\n\n\t\t\t\t\t\telif cornerCount == 1 and inlineCount == 1:\n\t\t\t\t\t\t\tif \"Same\" in cornerTypes:\n\t\t\t\t\t\t\t\tconditionToSet = \"JoinStorefront\"\n\t\t\t\t\t\t\t\tif i == 
0:\n\t\t\t\t\t\t\t\t\tvect = RevitTransVector(locLineEnd, locLineStart, magnitude=systemPostWidth/2)\n\t\t\t\t\t\t\t\t\tlocLineStart = locLineStart.Add(vect)\n\t\t\t\t\t\t\t\t\tstorefrontObject.Line = Line.CreateBound(locLineStart, storefrontObject.Line.GetEndPoint(1))\n\n\t\t\t\t\t\t\t\telif i == 1:\n\t\t\t\t\t\t\t\t\tvect = RevitTransVector(locLineStart, locLineEnd, magnitude=systemPostWidth/2)\n\t\t\t\t\t\t\t\t\tlocLineEnd = locLineEnd.Add(vect)\n\t\t\t\t\t\t\t\t\tstorefrontObject.Line = Line.CreateBound(storefrontObject.Line.GetEndPoint(0), locLineEnd)\n\n\t\t\t\t\t\t\telif \"Different\" in cornerTypes:\n\t\t\t\t\t\t\t\tconditionToSet = \"OnStorefront\"\n\t\t\t\t\t\t\telse: \n\t\t\t\t\t\t\t\tpass\n\n\t\t\t\t\t\telif cornerCount == 2 and inlineCount == 0:\n\t\t\t\t\t\t\tif not \"Different\" in cornerTypes:\n\t\t\t\t\t\t\t\tconditionToSet = \"JoinStorefront\"\n\t\t\t\t\t\t\t\tif i == 0:\n\t\t\t\t\t\t\t\t\tvect = RevitTransVector(locLineEnd, locLineStart, magnitude=systemPostWidth/2)\n\t\t\t\t\t\t\t\t\tlocLineStart = locLineStart.Add(vect)\n\t\t\t\t\t\t\t\t\tstorefrontObject.Line = Line.CreateBound(locLineStart, storefrontObject.Line.GetEndPoint(1))\n\n\t\t\t\t\t\t\t\telif i == 1:\n\t\t\t\t\t\t\t\t\tvect = RevitTransVector(locLineStart, locLineEnd, magnitude=systemPostWidth/2)\n\t\t\t\t\t\t\t\t\tlocLineEnd = locLineEnd.Add(vect)\n\t\t\t\t\t\t\t\t\tstorefrontObject.Line = Line.CreateBound(storefrontObject.Line.GetEndPoint(0), locLineEnd)\n\n\t\t\t\t\t\t\telif \"Same\" in cornerTypes and \"Different\" in cornerTypes:\n\t\t\t\t\t\t\t\tconditionToSet = \"ForcePostAtTBone\"\n\t\t\t\t\t\t\t\tif i == 0:\n\t\t\t\t\t\t\t\t\tvect = RevitTransVector(locLineStart, locLineEnd, magnitude=systemPostWidth/2)\n\t\t\t\t\t\t\t\t\tlocLineStart = locLineStart.Add(vect)\n\t\t\t\t\t\t\t\t\tstorefrontObject.Line = Line.CreateBound(locLineStart, storefrontObject.Line.GetEndPoint(1))\n\n\t\t\t\t\t\t\t\telif i == 1:\n\t\t\t\t\t\t\t\t\tvect = RevitTransVector(locLineEnd, locLineStart, magnitude=systemPostWidth/2)\n\t\t\t\t\t\t\t\t\tlocLineEnd = locLineEnd.Add(vect)\n\t\t\t\t\t\t\t\t\tstorefrontObject.Line = Line.CreateBound(storefrontObject.Line.GetEndPoint(0), locLineEnd)\n\n\t\t\t\t\t\telif cornerCount == 2 and inlineCount == 1:\n\t\t\t\t\t\t\tif \"Same\" in cornerTypes and \"Different\" in cornerTypes and \"Different\" in inlineTypes:\n\t\t\t\t\t\t\t\tpass\n\n\t\t\t\t\t#Logic gate to set conditions on the right ends, either start or end.\n\t\t\t\t\tif i == 0 and neighborSet:\n\t\t\t\t\t\tstorefrontObject.StartCondition = conditionToSet\n\t\t\t\t\t\t\n\t\t\t\t\t\tif conditionAngleOffset:\n\t\t\t\t\t\t\tstorefrontObject.StartAngledOffset = conditionAngleOffset\n\t\t\t\t \n\t\t\t\t\telif i == 1 and neighborSet:\n\t\t\t\t\t\tstorefrontObject.EndCondition = conditionToSet\n\t\t\t\t\t\t\n\t\t\t\t\t\tif conditionAngleOffset:\n\t\t\t\t\t\t\tstorefrontObject.EndAngledOffset = conditionAngleOffset\n\n\n\t\t\t\t#############################################\n\t\t\t\t# Creation #\n\t\t\t\t#############################################\n\n\t\t\t\t#--------------Curtain Wall-----------------#\n\t\t\t\twith rpw.db.Transaction(\"Create Curtain Wall\") as tx:\n\t\t\t\t\tSupressErrorsAndWarnings(tx)\n\t\t\t\t\tnewWallHeadHeight = storefrontObject.HeadHeight \n\t\t\t\t\tnewWallLine = storefrontObject.Line\n\t\t\t\t\tnewWall = Wall.Create(doc, newWallLine, wallTypeCW, baseConstraint, newWallHeadHeight, 0, False, False)\n\t\t\t\t\tnewWall.get_Parameter(BuiltInParameter.WALL_ATTR_ROOM_BOUNDING).Set(0)\n\n\t\t\t\t\t#Set new CW Id to 
storefrontObject object \n\t\t\t\t\tstorefrontObject.CWElementId = newWall.Id\n\n\t\t\t\t\tdoc.Regenerate()\n\t\t\t\t\t\n\t\t\t\t\tif storefrontConfig.currentConfig[\"isFramed\"]:\n\t\t\t\t\t\tif storefrontObject.StartCondition == \"Angled\":\n\t\t\t\t\t\t\tWallUtils.DisallowWallJoinAtEnd(newWall, 0)\n\t\t\t\t\t\tif storefrontObject.EndCondition == \"Angled\":\n\t\t\t\t\t\t\tWallUtils.DisallowWallJoinAtEnd(newWall, 1)\n\n\t\t\t\t\tconditionsList = [storefrontObject.StartCondition, storefrontObject.EndCondition]\n\n\t\t\t\t\t#print storefrontObject.SuperType\n\t\t\t\t\t#print \"start - \" + str(storefrontObject.StartCondition)\n\t\t\t\t\t#print \"end - \" + str(storefrontObject.EndCondition)\n\t\t\t\t\t\n\t\t\t\t\tfor i in range(len(conditionsList)):\n\t\t\t\t\t\tcondition = conditionsList[i]\n\t\t\t\t\t\tnewWall_grid = newWall.CurtainGrid\n\t\t\t\t\t\tnewWallPoint = newWall.Location.Curve.GetEndPoint(i)\n\t\t\t\t\t\tmullionList = GetVerticalMullionsAtPoint(newWall_grid, newWallPoint, detectionTolerance=0.5)\n\n\t\t\t\t\t\tif mullionList:\n\t\t\t\t\t\t\tfor mul in mullionList:\n\t\t\t\t\t\t\t\tmul.Pinned = False\n\n\t\t\t\t\t\t\t\tif condition == \"OnGyp\":\n\t\t\t\t\t\t\t\t\tmul.ChangeTypeId(mullionDict[systemName + \"_WallStart\"])\n\n\t\t\t\t\t\t\t\telif condition == \"OnObstruction\":\n\t\t\t\t\t\t\t\t\tmul.ChangeTypeId(mullionDict[systemName + \"_WallStart\"])\n\n\t\t\t\t\t\t\t\telif condition == \"OnStorefront\":\n\t\t\t\t\t\t\t\t\tmul.ChangeTypeId(mullionDict[systemName + \"_WallStart\"])\n\n\t\t\t\t\t\t\t\telif condition == \"JoinStorefront\":\n\t\t\t\t\t\t\t\t\tdoc.Delete(mul.Id)\n\n\t\t\t\t\t\t\t\telif condition == \"ForcePost\":\n\t\t\t\t\t\t\t\t\tmul.ChangeTypeId(mullionDict[systemName + \"_Post\"])\n\n\t\t\t\t\t\t\t\telif condition == \"ForcePostAtTBone\":\n\t\t\t\t\t\t\t\t\tmul.ChangeTypeId(mullionDict[systemName + \"_Post\"])\n\n\t\t\t\t\t\t\t\telif condition == \"Angled\":\n\t\t\t\t\t\t\t\t\tif storefrontConfig.currentConfig[\"isFramed\"]:\n\t\t\t\t\t\t\t\t\t\tmul.ChangeTypeId(mullionDict[systemName + \"_OneBy\"])\n\t\t\t\t\t\t\t\t\telse: \n\t\t\t\t\t\t\t\t\t\tdoc.Delete(mul.Id)\n\t\t\t\t\n\n\n\t\t\t\t#############################################\n\t\t\t\t# Modifications #\n\t\t\t\t#############################################\n\t\t\t\t\n\t\t\t\t\n\t\t\t\t#-----------Lower Infill Panels-------------#\n\n\t\t\t\tnewWall_grid = newWall.CurtainGrid\n\n\t\t\t\t#Create lower infill panel and sill\n\t\t\t\tif storefrontConfig.currentConfig[\"hasLowerInfill\"]:\n\n\t\t\t\t\tnewWallMidPoint = newWall.Location.Curve.Evaluate(0.5, True)\n\t\t\t\t\tnewWall_grid = newWall.CurtainGrid\n\t\t\t\t\tif storefrontObject.SuperType == \"Partial\":\n\t\t\t\t\t\twith rpw.db.Transaction(\"Create Lower Infill Panels\") as tx:\n\t\t\t\t\t\t\tSupressErrorsAndWarnings(tx)\n\t\t\t\t\t\t\ttry:\n\t\t\t\t\t\t\t\tgridPt = XYZ(newWallMidPoint.X, newWallMidPoint.Y, newWallMidPoint.Z + storefrontConfig.currentConfig[\"partialSillHeight\"] )\n\t\t\t\t\t\t\t\tgrid0 = newWall_grid.AddGridLine(True, gridPt, False)\n\t\t\t\t\t\t\texcept:\n\t\t\t\t\t\t\t\tpass\n\n\t\t\t\t\t\t\t# Create Solid Lower Panels\n\t\t\t\t\t\t\tdoc.Regenerate()\n\t\t\t\t\t\t\tnewWall_grid = newWall.CurtainGrid\n\t\t\t\t\t\t\tuGridIds = newWall_grid.GetVGridLineIds()\n\t\t\t\t\t\t\tnewWallLocationCurve = newWall.Location.Curve\n\t\t\t\t\t\t\tverticalGridPoints = []\n\n\t\t\t\t\t\t\tfor uGridId in uGridIds:\n\t\t\t\t\t\t\t\tuGrid = doc.GetElement(uGridId)\n\t\t\t\t\t\t\t\tuGridOrigin = 
uGrid.FullCurve.Origin\n\t\t\t\t\t\t\t\tverticalGridPoints.append(XYZ(uGridOrigin.X, uGridOrigin.Y, newWallMidPoint.Z))\n\t\t\t\t\t\t\tsplitCurves = RevitSplitLineAtPoints(newWallLocationCurve, verticalGridPoints)\n\n\t\t\t\t\t\t\tfor sCurve in splitCurves:\n\t\t\t\t\t\t\t\tsCurveMidpoint = sCurve.Evaluate(0.5, True)\n\t\t\t\t\t\t\t\tpanelIds = RevitCurtainPanelsAtPoint(newWall_grid, sCurveMidpoint, detectionTolerance=0.1)\n\t\t\t\t\t\t\t\tpanelElevationTupleList = []\n\t\t\t\t\t\t\t\tfor panelId in panelIds:\n\t\t\t\t\t\t\t\t\tpanel = doc.GetElement(panelId)\n\t\t\t\t\t\t\t\t\tpanelElevationTupleList.append((panel,float(panel.Transform.Origin.Z)))\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\tpanelElevationTupleList = sorted(panelElevationTupleList, key=lambda x: x[1])\n\n\t\t\t\t\t\t\t\t#Gets lowest panel and change to solid\n\t\t\t\t\t\t\t\ttry:\n\t\t\t\t\t\t\t\t\tpanelToChange = panelElevationTupleList[0][0]\n\t\t\t\t\t\t\t\t\tpanelToChange.Pinned = False\n\t\t\t\t\t\t\t\t\tpanelToChange.ChangeTypeId(panelTypeDict[storefrontConfig.currentConfig[\"panelLowerInfill\"]])\n\t\t\t\t\t\t\t\texcept:\n\t\t\t\t\t\t\t\t\tpass\n\n\n\t\t\t\t#---------------Special Horizontals---------------#\n\t\t\t\tspecialHorizontals = storefrontConfig.currentConfig[\"specialHorizontalMullions\"]\n\t\t\t\tif specialHorizontals:\n\t\t\t\t\tfor key, value in specialHorizontals.items():\n\t\t\t\t\t\tif key in wtName:\n\t\t\t\t\t\t\tnewWallMidPoint = newWall.Location.Curve.Evaluate(0.5, True)\n\t\t\t\t\t\t\tnewWall_grid = newWall.CurtainGrid\n\t\t\t\t\t\t\twith rpw.db.Transaction(\"Create Special Horizontal\") as tx:\n\t\t\t\t\t\t\t\tSupressErrorsAndWarnings(tx)\n\t\t\t\t\t\t\t\ttry:\n\t\t\t\t\t\t\t\t\tgridPt = XYZ(newWallMidPoint.X, newWallMidPoint.Y, newWallMidPoint.Z + value[0])\n\t\t\t\t\t\t\t\t\tgrid0 = newWall_grid.AddGridLine(True, gridPt, False)\n\t\t\t\t\t\t\t\texcept:\n\t\t\t\t\t\t\t\t\tpass\n\n\t\t\t\t#-----------Midspan Intersections (posts)----------#\n\n\t\t\t\tnewWall_grid = newWall.CurtainGrid\n\t\t\t\tif gridIntersectionPostPoints:\n\t\t\t\t\twith rpw.db.Transaction(\"Create Intersection Grids\") as tx:\n\t\t\t\t\t\tSupressErrorsAndWarnings(tx)\n\t\t\t\t\t\tfor gridIntersectionPoint in gridIntersectionPostPoints:\n\t\t\t\t\t\t\ttry:\n\t\t\t\t\t\t\t\tgridInt = newWall_grid.AddGridLine(False, gridIntersectionPoint, False)\n\t\t\t\t\t\t\t\tmullionIntList = GetVerticalMullionsAtPoint(newWall_grid, gridIntersectionPoint, detectionTolerance=0.001)\n\t\t\t\t\t\t\t\tif mullionIntList:\n\t\t\t\t\t\t\t\t\tfor mullion3 in mullionIntList:\n\t\t\t\t\t\t\t\t\t\tmullion3.Pinned = False\n\t\t\t\t\t\t\t\t\t\tmullion3.ChangeTypeId(mullionDict[storefrontConfig.currentConfig[\"midspanIntersectionMullion\"]])\n\t\t\t\t\t\t\texcept:\n\t\t\t\t\t\t\t\tpass\n\n\t\t\t\t\t\t\t \n\t\t\t\t#-------------------Modify Ends-------------------#\n\t\t\t\t\n\t\t\t\twith rpw.db.Transaction(\"Modify Ends\") as tx:\n\t\t\t\t\tSupressErrorsAndWarnings(tx)\n\t\t\t\t\t#Disallow as needed:\n\n\n\t\t\t\t\tif storefrontConfig.currentConfig[\"isFramed\"]:\n\t\t\t\t\t\tif storefrontObject.StartCondition == \"Angled\":\n\t\t\t\t\t\t\tWallUtils.DisallowWallJoinAtEnd(newWall, 0)\n\t\t\t\t\t\tif storefrontObject.EndCondition == \"Angled\":\n\t\t\t\t\t\t\tWallUtils.DisallowWallJoinAtEnd(newWall, 1)\n\n\t\t\t\t\tdoc.Regenerate()\n\n\t\t\t\t\tconditionsList = [storefrontObject.StartCondition, storefrontObject.EndCondition]\n\n\t\t\t\t\t#print storefrontObject.SuperType\n\t\t\t\t\t#print \"start - \" + str(storefrontObject.StartCondition)\n\t\t\t\t\t#print \"end - 
\" + str(storefrontObject.EndCondition)\n\t\t\t\t\t\n\t\t\t\t\tfor i in range(len(conditionsList)):\n\t\t\t\t\t\tcondition = conditionsList[i]\n\t\t\t\t\t\tnewWall_grid = newWall.CurtainGrid\n\t\t\t\t\t\tnewWallPoint = newWall.Location.Curve.GetEndPoint(i)\n\t\t\t\t\t\tmullionList = GetVerticalMullionsAtPoint(newWall_grid, newWallPoint, detectionTolerance=0.5, searchOnlySelf=True)\n\n\t\t\t\t\t\tif mullionList:\n\t\t\t\t\t\t\tfor mul in mullionList:\n\t\t\t\t\t\t\t\tmul.Pinned = False\n\n\t\t\t\t\t\t\t\tif condition == \"OnGyp\":\n\t\t\t\t\t\t\t\t\tmul.ChangeTypeId(mullionDict[systemName + \"_WallStart\"])\n\n\t\t\t\t\t\t\t\telif condition == \"OnObstruction\":\n\t\t\t\t\t\t\t\t\tmul.ChangeTypeId(mullionDict[systemName + \"_WallStart\"])\n\n\t\t\t\t\t\t\t\telif condition == \"OnStorefront\":\n\t\t\t\t\t\t\t\t\tmul.ChangeTypeId(mullionDict[systemName + \"_WallStart\"])\n\n\t\t\t\t\t\t\t\telif condition == \"JoinStorefront\":\n\t\t\t\t\t\t\t\t\tdoc.Delete(mul.Id)\n\n\t\t\t\t\t\t\t\telif condition == \"ForcePost\":\n\t\t\t\t\t\t\t\t\tmul.ChangeTypeId(mullionDict[systemName + \"_Post\"])\n\n\t\t\t\t\t\t\t\telif condition == \"ForcePostAtTBone\":\n\t\t\t\t\t\t\t\t\tmul.ChangeTypeId(mullionDict[systemName + \"_Post\"])\n\n\t\t\t\t\t\t\t\telif condition == \"Angled\":\n\t\t\t\t\t\t\t\t\tif storefrontConfig.currentConfig[\"isFramed\"]:\n\t\t\t\t\t\t\t\t\t\tmul.ChangeTypeId(mullionDict[systemName + \"_OneBy\"])\n\t\t\t\t\t\t\t\t\telse: \n\t\t\t\t\t\t\t\t\t\tdoc.Delete(mul.Id)\n\n\n\n\t\t\t\t#-----------------Glazing Panel Types----------------#\n\t\t\t\t\n\t\t\t\tchangeToPanel = None\n\n\t\t\t\tif \"Demising\" in wtName:\n\t\t\t\t\tchangeToPanel = storefrontConfig.currentConfig[\"panelGlazedCenter\"]\n\t\t\t\telif \"Offset\" in wtName:\n\t\t\t\t\tchangeToPanel = storefrontConfig.currentConfig[\"panelGlazedOffset\"]\n\t\t\t\telif \"Double\" in wtName:\n\t\t\t\t\tchangeToPanel = storefrontConfig.currentConfig[\"panelGlazedDouble\"]\n\t\t\t\telse:\n\t\t\t\t\tpass\n\n\t\t\t\tif changeToPanel:\n\t\t\t\t\twith rpw.db.Transaction(\"Change Glazing Types\") as tx:\n\t\t\t\t\t\tSupressErrorsAndWarnings(tx)\n\t\t\t\t\t\tdoc.Regenerate()\n\t\t\t\t\t\tpanels = newWall_grid.GetPanelIds()\n\t\t\t\t\t\tfor panelToChangeId in panels:\n\t\t\t\t\t\t\tpanelToChange = doc.GetElement(panelToChangeId)\n\t\t\t\t\t\t\tpanelToChange.Pinned = False\n\t\t\t\t\t\t\tpanelToChange.ChangeTypeId(panelTypeDict[changeToPanel])\n\n\n\n\n\t\t\t\t#-------------------Doors------------------#\n\t\t\t\t\n\t\t\t\tif storefrontObject.Doors:\n\t\t\t\t\tnewWallStartPoint = newWall.Location.Curve.GetEndPoint(0)\n\t\t\t\t\tnewWallEndPoint = newWall.Location.Curve.GetEndPoint(1)\n\t\t\t\t\tdoorsOnWall = storefrontObject.Doors\n\n\t\t\t\t\twith rpw.db.Transaction(\"Create Door Grids 0\") as tx:\n\t\t\t\t\t\tSupressErrorsAndWarnings(tx)\n\n\t\t\t\t\t\tfor doorId in doorsOnWall:\n\n\t\t\t\t\t\t\t#Location info\n\t\t\t\t\t\t\tdoor = doc.GetElement(doorId)\n\t\t\t\t\t\t\tdoorName = door.Name\n\t\t\t\t\t\t\tdoorLocationCenter = door.Location.Point\n\t\t\t\t\t\t\tdoorLocationRotation = door.Location.Rotation\n\t\t\t\t\t\t\tdoorHandOrientation = door.HandOrientation\n\n\t\t\t\t\t\t\t#Defaults\n\t\t\t\t\t\t\tdoorHand = \"R\"\n\t\t\t\t\t\t\tdoorWidth = 1.0\n\t\t\t\t\t\t\tdoorType = \"SWING\"\n\n\t\t\t\t\t\t\t#Get specific door info based on registered doors in the config.\n\t\t\t\t\t\t\tif doorDict.get(doorName):\n\n\t\t\t\t\t\t\t\tdoorDetails = doorDict[doorName]\n\t\t\t\t\t\t\t\tdoorHand = doorDetails[0]\n\t\t\t\t\t\t\t\tdoorWidth = 
doorDetails[1]\n\t\t\t\t\t\t\t\tdoorType = doorDetails[2]\n\n\t\t\t\t\t\t\t\tframeMullion0 = mullionDict[systemName + doorDetails[3]]\n\t\t\t\t\t\t\t\tframeMullion1 = mullionDict[systemName + doorDetails[4]]\n\t\t\t\t\t\t\t\textraAdjustment0 = doorDetails[5]\n\t\t\t\t\t\t\t\textraAdjustment1 = doorDetails[6]\n\n\t\t\t\t\t\t\telse: \n\n\t\t\t\t\t\t\t\t#Defaults if no door is found\n\t\t\t\t\t\t\t\tframeMullion0 = mullionDict[systemName + \"_DoorFrame\"]\n\t\t\t\t\t\t\t\tframeMullion1 = mullionDict[systemName + \"_DoorFrame\"]\n\n\t\t\t\t\t\t\t\t#Fine adjustments for mullion position\n\t\t\t\t\t\t\t\textraAdjustment0 = 0\n\t\t\t\t\t\t\t\textraAdjustment1 = 0\n\t\t\t\t\t\t\t\tprint \"ISSUE: Unable to recognize door - \" + doorName\n\n\n\t\t\t\t\t\t\t#Get offset widths for door frame mullions\n\t\t\t\t\t\t\tfm0 = doc.GetElement(frameMullion0)\n\t\t\t\t\t\t\tframeMullion0Width = fm0.get_Parameter(BuiltInParameter.CUST_MULLION_WIDTH2).AsDouble()\n\t\t\t\t\t\t\tframeMullion0Width += fm0.get_Parameter(BuiltInParameter.CUST_MULLION_WIDTH1).AsDouble()\n\n\t\t\t\t\t\t\tfm1 = doc.GetElement(frameMullion1)\n\t\t\t\t\t\t\tframeMullion1Width = fm1.get_Parameter(BuiltInParameter.CUST_MULLION_WIDTH2).AsDouble()\n\t\t\t\t\t\t\tframeMullion1Width += fm1.get_Parameter(BuiltInParameter.CUST_MULLION_WIDTH1).AsDouble()\n\n\t\t\t\t\t\t\t#Accounting for mullion CUST_MULLION_THICKnesses \n\t\t\t\t\t\t\textra0 = (frameMullion0Width * 0.5) + extraAdjustment0\n\t\t\t\t\t\t\textra1 = (frameMullion1Width * 0.5) + extraAdjustment1\n\n\t\t\t\t\t\t\t#Vectors to move location point\n\t\t\t\t\t\t\tvect0 = doorHandOrientation.Multiply(((doorWidth / 2) + extra0))\n\t\t\t\t\t\t\tvect1 = doorHandOrientation.Multiply(((doorWidth / 2) + extra1) * -1)\n\n\t\t\t\t\t\t\t#Door end points\n\t\t\t\t\t\t\tdoor_end0 = doorLocationCenter.Add(vect0)\n\t\t\t\t\t\t\tdoor_end1 = doorLocationCenter.Add(vect1)\n\n\n\t\t\t\t\t\t\t#Detection tolerance for nearby mullions based on system\n\t\t\t\t\t\t\t#required because of varying mullion sizes\n\n\t\t\t\t\t\t\tsystemDetectionFactor = storefrontConfig.currentConfig[\"closeMullionDetectionFactor\"]\n\n\t\t\t\t\t\t\tdetectionCheckDist0 = frameMullion0Width * systemDetectionFactor\n\t\t\t\t\t\t\tdetectionCheckDist1 = frameMullion1Width * systemDetectionFactor\n\t\n\n\t\t\t\t\t\t\tdoc.Regenerate()\n\t\t\t\t\t\t\tnewWall_grid = newWall.CurtainGrid\n\n\t\t\t\t\t\t\t#Check to see if a mullion exists in the spot where one would be created.\n\t\t\t\t\t\t\tcheckMullion0 = GetVerticalMullionsAtPoint(newWall_grid, door_end0, detectionTolerance=detectionCheckDist0)\n\t\t\t\t\t\t\tif not checkMullion0:\n\t\t\t\t\t\t\t\ttry:\n\t\t\t\t\t\t\t\t\tgrid0 = newWall_grid.AddGridLine(False, door_end0, False)\n\t\t\t\t\t\t\t\texcept:\n\t\t\t\t\t\t\t\t\tpass\n\n\t\t\t\t\t\t\t\tmullion0List = GetVerticalMullionsAtPoint(newWall_grid, door_end0, detectionTolerance=0.001)\n\t\t\t\t\t\t\t\tif mullion0List:\n\t\t\t\t\t\t\t\t\tfor mullion0 in mullion0List:\n\t\t\t\t\t\t\t\t\t\tmullion0.Pinned = False\n\t\t\t\t\t\t\t\t\t\tmullion0.Lock = False\n\t\t\t\t\t\t\t\t\t\tmullion0.ChangeTypeId(frameMullion0)\n\n\t\t\t\t\t\t\tdoc.Regenerate()\n\t\t\t\t\t\t\t#Check to see if a mullion exists in the spot where one would be created.\n\t\t\t\t\t\t\tcheckMullion1 = GetVerticalMullionsAtPoint(newWall_grid, door_end1, detectionTolerance=detectionCheckDist1)\n\t\t\t\t\t\t\tif not checkMullion1:\n\t\t\t\t\t\t\t\ttry:\n\t\t\t\t\t\t\t\t\tgrid1 = newWall_grid.AddGridLine(False, door_end1, 
False)\n\t\t\t\t\t\t\t\texcept:\n\t\t\t\t\t\t\t\t\tpass\n\n\t\t\t\t\t\t\t\tmullion1List = GetVerticalMullionsAtPoint(newWall_grid, door_end1, detectionTolerance=0.001)\n\t\t\t\t\t\t\t\tif mullion1List:\n\t\t\t\t\t\t\t\t\tfor mullion1 in mullion1List:\n\t\t\t\t\t\t\t\t\t\tmullion1.Pinned = False\n\t\t\t\t\t\t\t\t\t\tmullion1.Lock = False\n\t\t\t\t\t\t\t\t\t\tmullion1.ChangeTypeId(frameMullion1)\n\n\t\t\t\t\t\t#-----------------Empty Panel----------------#\n\t\t\t\t\t\t\tdoc.Regenerate()\n\t\t\t\t\t\t\tpanelToChangeId = RevitCurtainPanelsAtPoint(newWall_grid, doorLocationCenter, detectionTolerance=0.2)\n\t\t\t\t\t\t\tif panelToChangeId:\n\t\t\t\t\t\t\t\tpanelToChange = doc.GetElement(panelToChangeId[0])\n\t\t\t\t\t\t\t\tpanelToChange.Pinned = False\n\t\t\t\t\t\t\t\tpanelToChange.ChangeTypeId(panelTypeDict[storefrontConfig.currentConfig[\"panelEmpty\"]])\n\n\t\t\t\t\t\t#-----------------Sill Delete----------------#\n\t\t\t\t\t\t\tdoc.Regenerate()\n\n\t\t\t\t\t\t\tfilterName = storefrontConfig.currentConfig[\"AUTO_MULLION_BORDER1_HORIZ\"].split(\"_\")[1]\n\t\t\t\t\t\t\tdoorSillMullions = GetHorizontalMullionsAtPoint(newWall_grid, doorLocationCenter, nameFilter=filterName)\n\n\t\t\t\t\t\t\tfor dsm in doorSillMullions:\n\t\t\t\t\t\t\t\tdsm.Pinned = False\n\t\t\t\t\t\t\t\tdoc.Delete(dsm.Id)\n\n\t\t\t\t\t\t#-------------Continuous Head Above Door--------------#\n\n\t\t\t\t\t\t\tdoorFrameContinuous = storefrontConfig.currentConfig[\"mullionContinuousVerticalAtDoorTop\"]\n\n\t\t\t\t\t\t\tif not doorFrameContinuous:\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t#filterName = storefrontConfig.currentConfig[\"AUTO_MULLION_BORDER2_HORIZ\"].split(\"_\")[1]\n\n\t\t\t\t\t\t\t\t#Join head so its continuous\n\t\t\t\t\t\t\t\tdoc.Regenerate()\n\t\t\t\t\t\t\t\tdoorHeadMullions = GetHorizontalMullionsAtPoint(newWall_grid, doorLocationCenter, nameFilter=\"Head\")\n\t\t\t\t\t\t\t\tfor dhm in doorHeadMullions:\n\t\t\t\t\t\t\t\t\tdhm.JoinMullion()\n\t \n\t\t\t\t#-------------------Intermediates-------------------# \n\n\t\t\t\tnewWall_grid = newWall.CurtainGrid\n\t\t\t\tpanels = newWall_grid.GetPanelIds()\n\n\t\t\t\tintermediateMullionWidth = 0\n\t\t\t\tif storefrontConfig.currentConfig[\"isFramed\"]:\n\n\t\t\t\t\t#Select the right intermediate mullion in the project based\n\t\t\t\t\t#on which system is being used. 
\n\n\t\t\t\t\tif \"demising\" in wtName.lower():\n\t\t\t\t\t\tmulName = storefrontConfig.currentConfig[\"AUTO_MULLION_INTERIOR_VERT\"]\n\t\t\t\t\telif \"offset\" in wtName.lower():\n\t\t\t\t\t\tmulName = storefrontConfig.currentConfig[\"AUTO_MULLION_INTERIOR_VERT_OFFSET\"]\n\t\t\t\t\telif \"double\" in wtName.lower():\n\t\t\t\t\t\tmulName = storefrontConfig.currentConfig[\"AUTO_MULLION_INTERIOR_VERT_DOUBLE\"]\n\t\t\t\t\telse:\n\t\t\t\t\t\tmulName = storefrontConfig.currentConfig[\"AUTO_MULLION_INTERIOR_VERT\"]\n\n\t\t\t\t\tintermediateMullion = doc.GetElement(mullionDict[mulName])\n\n\t\t\t\t\t#Get the sizes of the intermediate\n\t\t\t\t\ttry:\n\t\t\t\t\t\tintermediateMullionWidth = intermediateMullion.get_Parameter(BuiltInParameter.CUST_MULLION_WIDTH1).AsDouble()\n\t\t\t\t\t\tintermediateMullionWidth += intermediateMullion.get_Parameter(BuiltInParameter.CUST_MULLION_WIDTH2).AsDouble()\n\t\t\t\t\texcept:\n\t\t\t\t\t\tfor p in intermediateMullion.Parameters:\n\t\t\t\t\t\t\tif p.Definition.Name == \"Width on side 1\":\n\t\t\t\t\t\t\t\tintermediateMullionWidth += p.AsDouble()\n\t\t\t\t\t\t\tif p.Definition.Name == \"Width on side 2\":\n\t\t\t\t\t\t\t\tintermediateMullionWidth += p.AsDouble()\n\n\t\t\t\t#For each panel, check the widths and divide them\n\t\t\t\t#according to the rules selected by the user. \n\t\t\t\tfor panelId in panels:\n\t\t\t\t\tpanel = doc.GetElement(panelId)\n\t\t\t\t\tpanelWidth = panel.get_Parameter(BuiltInParameter.CURTAIN_WALL_PANELS_WIDTH).AsDouble()\n\n\t\t\t\t\tif \"glazed\" in (panel.Name + panel.Symbol.Family.Name).lower() and panelWidth > minPanelWidth:\n\t\t\t\t\t\tnewGridPoints = []\n\t\t\t\t\t\tif storefrontSpacingType == 1:\n\t\t\t\t\t\t\tnewGridPoints = RevitDividePanelFixed(panel, storefrontPaneWidth, intermediateWidth=intermediateMullionWidth)\n\t\t\t\t\t\telif storefrontSpacingType == 0:\n\t\t\t\t\t\t\tnumberPanes = math.ceil(panelWidth/storefrontPaneWidth)\n\t\t\t\t\t\t\tif numberPanes > 1:\n\t\t\t\t\t\t\t\tnewGridPoints = RevitDividePanelEquidistant(panel, numberPanes, intermediateWidth=intermediateMullionWidth)\n\n\t\t\t\t\t\tif newGridPoints:\n\t\t\t\t\t\t\twith rpw.db.Transaction(\"Create intermediate grid lines\") as tx:\n\t\t\t\t\t\t\t\tSupressErrorsAndWarnings(tx)\n\t\t\t\t\t\t\t\tfor gridpt in newGridPoints:\n\t\t\t\t\t\t\t\t\ttry:\n\t\t\t\t\t\t\t\t\t\tgrid0 = newWall_grid.AddGridLine(False, gridpt, False)\n\t\t\t\t\t\t\t\t\t\tmullions0List = GetVerticalMullionsAtPoint(newWall_grid, grid0.FullCurve.Origin, detectionTolerance=0.001)\n\t\t\t\t\t\t\t\t\t\tfor mullion0 in mullions0List:\n\t\t\t\t\t\t\t\t\t\t\tmullion0.Pinned = False\n\t\t\t\t\t\t\t\t\t\t\tif storefrontConfig.currentConfig[\"isFramed\"]:\n\t\t\t\t\t\t\t\t\t\t\t\tmullion0.ChangeTypeId(intermediateMullion.Id)\n\n\t\t\t\t\t\t\t\t\t\t\t\t#Intermediates die into the head if mullion is \"Broken\"\n\t\t\t\t\t\t\t\t\t\t\t\tif not storefrontConfig.currentConfig[\"mullionContinuousVerticalIntermediateTop\"]:\n\t\t\t\t\t\t\t\t\t\t\t\t\tmullion0.BreakMullion()\n\t\t\t\t\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\t\t\t\t\t#Delete mullion in the case that the system type is butt joined.\n\t\t\t\t\t\t\t\t\t\t\t\tdoc.Delete(mullion0.Id)\n\t\t\t\t\t\t\t\t\texcept:\n\t\t\t\t\t\t\t\t\t\tpass\n\n\t\t\t\t#---------------Special Sills---------------#\n\t\t\t\t\n\t\t\t\tnewWall_grid = newWall.CurtainGrid\n\n\t\t\t\tupdatedSill = None\n\n\t\t\t\tcurrentSill = storefrontConfig.currentConfig[\"AUTO_MULLION_BORDER1_HORIZ\"]\n\t\t\t\treplacementSills = 
storefrontConfig.currentConfig[\"specialSillConditions\"]\n\n\t\t\t\tif replacementSills:\n\t\t\t\t\tfor key,value in replacementSills.items():\n\t\t\t\t\t\tif key.lower() in wtName.lower():\n\t\t\t\t\t\t\tupdatedSill = mullionDict[value]\n\n\t\t\t\tif updatedSill:\n\t\t\t\t\tpanels = newWall_grid.GetPanelIds()\n\t\t\t\t\twith rpw.db.Transaction(\"Update Sills\") as tx:\n\t\t\t\t\t\tSupressErrorsAndWarnings(tx) \n\t\t\t\t\t\tfor panelId in panels:\n\t\t\t\t\t\t\tpanel = doc.GetElement(panelId)\n\t\t\t\t\t\t\tpanelPoint = panel.GetTransform().Origin\n\t\t\t\t\t\t\tsills = GetHorizontalMullionsAtPoint(newWall_grid, panelPoint, nameFilter=currentSill)\n\n\t\t\t\t\t\t\tsillElevationTupleList = []\n\t\t\t\t\t\t\tfor sill in sills:\n\t\t\t\t\t\t\t\tsillElevationTupleList.append((sill,float(sill.LocationCurve.Origin.Z)))\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\tsillElevationTupleList = sorted(sillElevationTupleList, key=lambda x: x[1])\n\n\t\t\t\t\t\t\ttry:\n\t\t\t\t\t\t\t\tsillToChange = sillElevationTupleList[0][0]\n\t\t\t\t\t\t\t\tsillToChange.Pinned = False\n\t\t\t\t\t\t\t\tsillToChange.ChangeTypeId(updatedSill)\n\t\t\t\t\t\t\texcept:\n\t\t\t\t\t\t\t\tpass \n\t\t\t\t \n\t\t\t\t#############################################\n\t\t\t\t# Final Param Setters #\n\t\t\t\t#############################################\n\t\t\t\t# Set heights, for whatever reason differing heights before adding gridlines is an issue so set this last.\n\t\t\t\twith rpw.db.Transaction(\"Create Curtain Wall\") as tx:\n\t\t\t\t\tSupressErrorsAndWarnings(tx)\n\t\t\t\t\tnewWallSillHeight = storefrontObject.SillHeight\n\t\t\t\t\tnewWallHeadHeight = storefrontObject.HeadHeight - storefrontObject.SillHeight\n\t\t\t\t\tnewWall.get_Parameter(BuiltInParameter.WALL_BASE_OFFSET).Set(newWallSillHeight)\n\t\t\t\t\tnewWall.get_Parameter(BuiltInParameter.WALL_USER_HEIGHT_PARAM).Set(newWallHeadHeight)\n\t\t\t\t\tnewWall.get_Parameter(BuiltInParameter.ALL_MODEL_INSTANCE_COMMENTS).Set(storefrontObject.SuperType)\n\t\t\t\t\tnewWall.get_Parameter(BuiltInParameter.ALL_MODEL_MARK).Set(str(selectedLevel) + \"-\"+ str(storefrontObject.AssemblyID))\n\n\n\n\tprint \"...CHECKING ERRORS...\"\n\n\tstorefront_check_errors()\n\n\tprint \"...DONE!\"", "def action_move_create(self):\n account_move = self.env[\"account.move\"]\n\n for request in self:\n if not request.journal_id:\n raise UserError(\n _(\n \"Please define a journal for this request.\"\n )\n )\n if not request.journal_id:\n raise UserError(\n _(\n \"Please define sequence on the journal related to this request.\"\n )\n )\n if any(\n request.approve_request_ids.filtered(\n lambda line: not line.account_id\n )\n ):\n raise UserError(\n _(\n \"There is a line without any account. 
Please configure a stock account \"\n \"for all product categories that have products on the lines\"\n )\n )\n if not request.approve_request_ids:\n raise UserError(_(\"Please add at least one line!\"))\n if request.move_id:\n continue\n\n company_currency = request.company_id.currency_id\n partner_id = request.end_user.user_id.partner_id.id\n iml = request.approve_request_line_move_line_get()\n name = request.name or \"\"\n credit = 0.0\n debit = reduce(\n lambda x, y: x + y, [line.get(\"credit\", 0.0) for line in iml]\n )\n\n iml.append(\n {\n \"name\": self.name or \"/\",\n \"account_id\": request.account_id.id,\n \"currency_id\": company_currency.id,\n \"date_maturity\": fields.Date.context_today(self),\n \"debit\": debit,\n \"credit\": credit,\n \"partner_id\": partner_id,\n }\n )\n\n iml = [(0, 0, line_item) for line_item in iml]\n move_vals = {\n \"ref\": request.name,\n \"line_ids\": iml,\n \"name\": self.name or \"/\",\n \"journal_id\": request.journal_id.id,\n \"date\": fields.Date.context_today(self),\n \"partner_id\": partner_id,\n \"narration\": request.name,\n }\n move = account_move.with_context(check_move_validity=False).create(\n move_vals\n )\n move.post()\n vals = {\n \"move_id\": move.id,\n \"move_name\": move.name,\n }\n request.write(vals)\n return True", "def generate_buttons(self):\n raise Exception('Implement me!')", "def generate(self, diagram):", "def create(self, vals):\n res = super(HelpdeskTicket, self).create(vals)\n content = \"\"\n if vals.get('timesheet_ids', False):\n content += \"Parte de Horas: <br/>\"\n for ts in vals.get('timesheet_ids'):\n if content != \"Parte de Horas: <br/>\":\n content += \"------------------------------------------------------------------------------<br/>\"\n for field_name in ts[2]:\n new_val = ts[2][field_name]\n if field_name == 'date':\n content += \" \\u2022 Fecha: \" + new_val + \"<br/>\"\n elif field_name == 'employee_id':\n new_employee = self.env['hr.employee'].browse(new_val).name\n content += \" \\u2022 Empleado: \" + new_employee + \"<br/>\"\n elif field_name == 'unit_amount':\n content += \" \\u2022 Duración: \" + '{0:02.0f}:{1:02.0f}'.format(\n *divmod(new_val * 60, 60)) + \"<br/>\"\n elif field_name == 'name' and new_val:\n content += \" \\u2022 Descripción: \" + new_val + \"<br/>\"\n res.message_post(body=content)\n\n return res", "def create_entry_buttons(self):\n num_song=0\n learned_num=0\n self.root.ids.entriesBox.clear_widgets()\n for each in self.song_list.list_song:\n # create a button for each song entry\n num_song+=1 #Add up the number of song for every song looped in the list\n\n if each.status == \"n\":\n temp_button = Button(text=\"{} by {} ({}) ({})\".format(each.title,each.artist,each.year,\"learned\"))#Format the text for learned song in temp_button\n else:\n temp_button = Button(text=\"{} by {} ({}) \".format(each.title,each.artist,each.year))\n temp_button.bind(on_release=self.press_entry)\n temp_button.bind(on_release=each.markSonglearned)#Mark the song chosen from the temp_button by clicking it learnt #Also note , by clicking refresh it will help\n self.root.ids.entriesBox.add_widget(temp_button)\n if each.status ==\"n\":\n temp_button.background_color = [1,0,0,1] #turn background color into red\n learned_num+=1\n else :\n temp_button.background_color = [2,1,1,2] #turn background color button into pink\n self.status_text = \"To learn:{} learned :{}\".format(num_song-learned_num,learned_num)", "def createMeshMoverButton(self, name, parent, uiInstance):\n\n part = name.partition(self.name)[2]\n\n 
# create the icon\n pixmap = QtGui.QPixmap(20, 15)\n pixmap.fill(QtGui.QColor(255, 176, 176))\n icon = QtGui.QIcon(pixmap)\n\n # create the button\n self.outlinerWidgets[name + \"_geoMoverBtn\"] = QtWidgets.QPushButton(icon, \"\")\n self.outlinerWidgets[name + \"_geoMoverBtn\"].setMinimumSize(QtCore.QSize(20, 15))\n self.outlinerWidgets[name + \"_geoMoverBtn\"].setMaximumSize(QtCore.QSize(20, 15))\n uiInstance.treeWidget.setItemWidget(parent, 3, self.outlinerWidgets[name + \"_geoMoverBtn\"])\n\n # connect and add to list\n self.outlinerWidgets[name + \"_geoMoverBtn\"].clicked.connect(\n partial(self.selectMover, part, False, False, True, self.outlinerWidgets[name + \"_geoMoverBtn\"]))\n self.outlinerControls.append([name + \"_geoMoverBtn\", name + \"_mover_geo\", icon])", "def setT1Button(self):\n self.T1Button = qt.QPushButton(\"Create T1 Mapping\")\n self.T1Button.toolTip = \"Create the T1 Mapping of the Scalar Volumes selected\"\n self.T1Button.enabled = False\n self.InputOutput_Layout.addRow(self.T1Button)", "def create(self, values):\n res = super(PurchaseOrderLine, self).create(values)\n states = ['purchase', 'done']\n if res.order_id.state in states:\n raise UserError(_('You can not create an additional purchase order line in a confirmed order '))\n return res", "def createGlobalMoverButton(self, name, parent, uiInstance):\n\n part = name.partition(self.name)[2]\n\n # create the icon\n pixmap = QtGui.QPixmap(20, 15)\n pixmap.fill(QtGui.QColor(\"yellow\"))\n icon = QtGui.QIcon(pixmap)\n\n # create the button\n self.outlinerWidgets[name + \"_globalMoverBtn\"] = QtWidgets.QPushButton(icon, \"\")\n self.outlinerWidgets[name + \"_globalMoverBtn\"].setMinimumSize(QtCore.QSize(20, 15))\n self.outlinerWidgets[name + \"_globalMoverBtn\"].setMaximumSize(QtCore.QSize(20, 15))\n uiInstance.treeWidget.setItemWidget(parent, 1, self.outlinerWidgets[name + \"_globalMoverBtn\"])\n\n # connect and add to list\n self.outlinerWidgets[name + \"_globalMoverBtn\"].clicked.connect(\n partial(self.selectMover, part, True, False, False, self.outlinerWidgets[name + \"_globalMoverBtn\"]))\n self.outlinerControls.append([name + \"_globalMoverBtn\", name + \"_mover\", icon])", "def main():\n# year = int(input(\"Enter year for calendar: \"))\n# first_day = first_day_of_year(year)\n\n # Loop through months 1 through 12\n # for month in range(1, NUM_MONTHS + 1):\n# first_day = print_month(first_day, month, year)\n\n canvas = make_canvas(CANVAS_WIDTH, CANVAS_HEIGHT, 'Calendar')\n # present the header, today's date\n\n top_rows(canvas)\n # present two buttons: weekly display and monthly display\n weekly_display_type = True\n date_to_present = date.today()\n #button_weekly(canvas,weekly_display_type,date_to_present)\n #button_monthly(canvas, weekly_display_type, date_to_present)\n # present weekly display\n canvas.update()\n canvas.mainloop()", "def fill_active(self, layout):\n self.new_func_triggers = QLineEdit()\n layout.addRow(\"new func triggers\", self.new_func_triggers)\n self.new_param = QLineEdit()\n layout.addRow(\"new parameter\", self.new_param)", "def new_polynomial_trajectory(ui):\n\n global polynomial_trajectories\n\n # Finding a Name .........................................................\n\n k = 0\n while \"r\" + str(k) in [t[\"name\"] for t in polynomial_trajectories]:\n k += 1\n\n trajectory = {\"name\": \"r\" + str(k),\n \"conditions\": []}\n\n # Deselect everything ....................................................\n\n ui.listWidget_poly.clearSelection()\n\n # Adding to the list widget 
..............................................\n\n item = QListWidgetItem()\n item.setText(trajectory[\"name\"])\n ui.listWidget_poly.addItem(item)\n ui.lineEdit_poly_fname.setText(\"r\" + str(k))\n polynomial_trajectories.append(trajectory)\n ui.listWidget_poly.setCurrentRow(ui.listWidget_poly.count() - 1)\n ui.lineEdit_poly_fname.setStyleSheet(\"color: #efefef;\")\n\n # Adding to the loops combobox ...........................................\n\n ui.comboBox_loops_trajectory.addItem(trajectory[\"name\"])\n\n if len(polynomial_trajectories) == 1:\n ui.pushButton_poly_del.setEnabled(True)", "def startLineDrawing(self, startPos):\n self.line = LineNodePath(render2d, thickness=2, colorVec=(0.8,0.8,0.8,1))\n self.line.moveTo(startPos)\n t = taskMgr.add(self.drawLineTask, \"drawLineTask\")\n t.startPos = startPos", "def action_move_create(self):\n\n res = super(account_invoice, self).action_move_create()\n\n for inv in self:\n if not inv.move_id:\n return res\n for ml in inv.move_id.line_id:\n ml_vals = {\n 'emp_police': inv.pol_numpol,\n 'emp_quittance': inv.prm_numero_quittance,\n 'emp_effet': datetime.datetime.strptime(inv.prm_datedeb, '%Y-%m-%d').date() if inv.prm_datedeb else datetime.datetime.today(),\n 'emp_datech': datetime.datetime.strptime(inv.prm_datefin, '%Y-%m-%d').date() if inv.prm_datefin else datetime.datetime.today(),\n }\n ml.update(ml_vals)\n move_vals = {\n 'num_police': inv.pol_numpol,\n 'num_quittance': inv.prm_numero_quittance,\n 'date_effect': datetime.datetime.strptime(inv.prm_datedeb, '%Y-%m-%d').date() if inv.prm_datedeb else datetime.datetime.today(),\n 'date_end': datetime.datetime.strptime(inv.prm_datefin, '%Y-%m-%d').date() if inv.prm_datefin else datetime.datetime.today(),\n }\n inv.move_id.update(move_vals)\n self._log_event()\n return res", "def create_new_round(self) -> None:\n\n # 1\n for elem in self.data:\n key = ''\n value = ''\n for k, v in elem.items():\n if k == 'name':\n key = v\n else:\n value = v.get()\n self.attributs.update({key: value})\n\n # 2\n order = ct.Controls.verify_round_creation(self.attributs)\n self.master.master.list_instances_menus_tournament = Menu.update_menus_tournament(order, self.master)\n self.master.master.left_window.update_and_display(\n self.master.master.list_instances_menus_tournament)\n self.master.master.launch()\n self.destroy_widgets()\n\n # # 3\n matches = ct.Controls.generate_matches()\n self.display_round_information(matches)" ]
[ "0.6388559", "0.62406707", "0.5945554", "0.59113073", "0.58027905", "0.5781649", "0.5714266", "0.5638296", "0.5438896", "0.5420091", "0.53819907", "0.5336668", "0.53099597", "0.52825195", "0.5279254", "0.5253911", "0.52533376", "0.52415204", "0.523156", "0.5220959", "0.5211231", "0.5161544", "0.5143496", "0.5122719", "0.51189846", "0.51026285", "0.5095513", "0.5093841", "0.50921416", "0.5090281", "0.5083733", "0.5073505", "0.5047427", "0.5025119", "0.50217897", "0.5015156", "0.50123775", "0.50083905", "0.5003364", "0.49985412", "0.49984455", "0.49935246", "0.4983545", "0.49794263", "0.49774995", "0.4968803", "0.49526614", "0.4947677", "0.49333128", "0.4930623", "0.49282056", "0.49256715", "0.49250984", "0.49167866", "0.49134165", "0.49105325", "0.4910208", "0.4907457", "0.49069983", "0.49052072", "0.4898786", "0.48941413", "0.48818886", "0.48774824", "0.48757526", "0.48716", "0.48681372", "0.4864182", "0.48635516", "0.4852914", "0.48518622", "0.48508468", "0.48466876", "0.48456264", "0.48338592", "0.4832297", "0.4825977", "0.48241705", "0.48199555", "0.48158017", "0.4809759", "0.48079073", "0.4806433", "0.48060244", "0.48042533", "0.47987455", "0.47911838", "0.47813419", "0.47792393", "0.47790778", "0.4778928", "0.4778", "0.4775559", "0.47689334", "0.47624594", "0.4761416", "0.47605562", "0.47521505", "0.47519812", "0.47411904" ]
0.5296061
13
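The record closed above (rank 13) pairs an IronPython/Revit storefront script with its retrieval scores. One reusable piece of logic in that script is the pane count, numberPanes = math.ceil(panelWidth/storefrontPaneWidth), which is passed to RevitDividePanelEquidistant together with the intermediate mullion width. The helper's body is not shown in the dump, so the following pure-Python sketch is an assumption about what "equidistant" means here, and it ignores the mullion-width argument entirely:

import math

def equidistant_grid_positions(panel_width, target_pane_width):
    # Same count rule as the script: enough equal panes that none
    # exceeds the target pane width.
    number_panes = int(math.ceil(panel_width / float(target_pane_width)))
    if number_panes <= 1:
        return []  # panel is already narrow enough; no grid lines needed
    pane_width = panel_width / float(number_panes)
    # One vertical grid line between each pair of adjacent panes,
    # measured from the panel's left edge.
    return [i * pane_width for i in range(1, number_panes)]

print(equidistant_grid_positions(12.0, 5.0))  # -> [4.0, 8.0]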
This method overrides the ORM create method to change the state and tenant of the related property.
def create(self, vals): res = super(AccountAnalyticAccount, self).create(vals) if self._context.get('is_landlord_rent'): res.code = self.env['ir.sequence'].next_by_code( 'landlord.rent') if res.is_landlord_rent: res.write({'is_property': False}) if 'property_id' in vals: prop_brw = self.env['account.asset'].browse( vals['property_id']) if not prop_brw.property_owner: prop_brw.write( {'property_owner': vals.get('property_owner_id')}) return res
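The document above overrides Odoo's ORM create(): it lets super() insert the record, assigns a sequence code for landlord rents, and back-fills the owner on the related property. A framework-free sketch of that create-then-fix-up pattern follows; the Model, Property, and TenancyAccount classes are hypothetical stand-ins, not Odoo's API:

class Model:
    _store = []  # toy stand-in for the database table

    def __init__(self, **vals):
        self.__dict__.update(vals)

    @classmethod
    def create(cls, vals):
        record = cls(**vals)
        cls._store.append(record)
        return record


class Property(Model):
    pass


class TenancyAccount(Model):
    @classmethod
    def create(cls, vals):
        record = super().create(vals)       # base "ORM" insert happens first
        record.state = "open"               # post-create state change
        prop = vals.get("property")
        if prop is not None and getattr(prop, "owner", None) is None:
            prop.owner = vals.get("owner")  # back-fill the related property
        return record


home = Property.create({"name": "Unit 4B", "owner": None})
acct = TenancyAccount.create({"property": home, "owner": "landlord-1"})
print(home.owner, acct.state)  # -> landlord-1 open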
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def perform_create(self, serializer):\n serializer.save(using=UserConf.db)", "async def create(self, **state):\n connection = state.pop(self.connection_kwarg, None)\n obj = self.model(**state)\n await obj.save(force_insert=True, connection=connection)\n return obj", "def perform_create(self, serializer):\n serializer.save(user_id=self.request.user)\n up = UserProfile.objects.get(user=self.request.user)\n up.greyfish_active = True\n up.save()", "def perform_create(self, serializer):\n serializer.save()", "def perform_create(self, serializer):\n serializer.save()", "def perform_create(self, serializer):\n serializer.save()", "def perform_create(self, serializer):\n serializer.save()", "def perform_create(self, serializer):\n serializer.save()", "def perform_create(self, serializer):\n serializer.save()", "def perform_create(self, serializer):\n serializer.save()", "def perform_create(self, serializer):\n serializer.save()", "def perform_create(self, serializer):\n serializer.save()", "def perform_create(self, serializer):\n serializer.save()", "def perform_create(self, serializer):\n serializer.save()", "def perform_create(self, serializer):\n serializer.save()", "def perform_create(self, serializer):\n serializer.save()", "def perform_create(self, serializer):\n serializer.save()", "def perform_create(self, serializer):\n serializer.save()", "def perform_create(self, serializer):\n serializer.save()", "def perform_create(self, serializer):\n serializer.save()", "def perform_create(self, serializer):\n serializer.save()", "def perform_create(self, serializer):\n\n # check permissions?\n if self.property_pk:\n serializer.save(property_id=self.property_pk)\n else:\n raise Exception('No property_pk (property view id) provided in URL to create the meter')", "def perform_create(self, serializer):\n extra_data = self.get_additional_data(True)\n serializer.save(**extra_data)", "def _create(cls, model_class, *args, **kwargs):\n for k in kwargs.keys():\n if k in model_class.relationships():\n rel_key = '{}_id'.format(k)\n kwargs[rel_key] = str(kwargs[k].id)\n obj = super(BaseFactory, cls)._create(model_class, *args, **kwargs)\n obj.save(obj)\n return obj", "def save(self, *args, **kwargs):\n self.entity_type = \"Person\"\n super().save(*args, **kwargs)", "def create_property(self, key, prop):\n\n setting = self.new_property(key, prop)\n setting.create()\n return setting", "def create(self):\n\n pass", "def test_create(self):\n\n res = self.metadata.create_or_update(data=self.create)\n\n self.assertEqual(res.name, self.entity.name)\n self.assertEqual(res.service.id, self.entity.service.id)\n self.assertEqual(res.owner, None)", "def create(self):\n ...", "def perform_create(self, serializer):\n instance = serializer.save(\n domain=self.org_safe_get(self.request.user, self.kwargs.get('pk')))", "def perform_create(self, serializer):\n serializer.save(owner=self.request.user)", "def perform_create(self, serializer):\n serializer.save(owner=self.request.user)", "def perform_create(self, serializer):\n serializer.save(owner=self.request.user)", "def perform_create(self, serializer):\n serializer.save(owner=self.request.user)", "def perform_create(self, serializer):\n serializer.save(owner=self.request.user)", "def create(self):\n pass", "def create(self):\n pass", "def create(self):\n pass", "def beforeCreate(self):", "def setUp(self):\n super().setUp()\n Tenant.objects.get_or_create(schema_name=\"public\")", "def create(self):", "def perform_create(self, serializer):\n 
serializer.save(warehouse=self.request.user)", "def perform_create(self, serializer):\n serializer.save(created_by=self.request.user, modified_by=self.request.user)", "def create(self):\n\n raise NotImplementedError", "def perform_create(self, serializer):\n if serializer.instance is None:\n profile = Profile.objects.get(user=self.request.user)\n #print profile\n serializer.save(owner=profile)", "def perform_create(self, serializer):\n serializer.save(creator=self.request.user)", "def create_properties(self, properties):\n self._update_metadata_date(properties)\n self._backend.insert_product_properties(properties)", "def create(self, **kwargs):\n obj = self.model(**kwargs)\n self._for_write = True\n obj.save(force_insert=True, using=self.db, skip_moderation=True)\n return obj", "def create(cls, **kwargs):\r\n return cls().fill(**kwargs).save()", "def perform_create(self, serializer):\n serializer.save(created_by=self.request.user,\n modified_by=self.request.user,\n area=self.get_poly_obj())", "def get_property_state(self, org, **kw):\n property_details = self.get_details()\n property_details.update(kw)\n\n ps = PropertyState.objects.create(organization=org, **property_details)\n auditlog_detail = {}\n PropertyAuditLog.objects.create(organization=org, state=ps, **auditlog_detail)\n return ps", "def __init__(self, set_on_create=False):\n self.set_on_create = set_on_create", "def __init__(self, set_on_create=False):\n self.set_on_create = set_on_create", "def __init__(self, set_on_create=False):\n self.set_on_create = set_on_create", "def perform_create(self, serializer):\n profile = models.Profile.objects.get(pk=self.kwargs.get(\"pk\"))\n\n return serializer.save(profile=profile)", "def _create(self):\n def _createOnSuccess(result):\n if result == False:\n return defer.succeed(self)\n return self._config.insertObj(self)\n\n def _beforeSave(result):\n if result == False:\n return defer.succeed(self)\n return defer.maybeDeferred(self.beforeSave).addCallback(_createOnSuccess)\n\n return defer.maybeDeferred(self.beforeCreate).addCallback(_beforeSave)", "def perform_create(self, serializer):\n instance = serializer.save()\n instance.run()", "def _create(self, data):\n model = self.model\n data = self._check_odoo_attribute(data)\n binding = model.create(data)\n self._create_attribute_option(binding, data)\n _logger.debug('%s %d created from magento %s',\n self.model._name, binding.id, self.magento_id)\n return binding", "def perform_create(self, serializer):\n serializer.save(user_profile=self.request.user)", "def perform_create(self, serializer):\n serializer.save(user_profile=self.request.user)", "def perform_create(self, serializer):\n serializer.save(user_profile=self.request.user)", "def perform_create(self, serializer):\n serializer.save(user_profile=self.request.user)", "def perform_create(self, serializer):\n serializer.save(user_profile=self.request.user)", "def perform_create(self, serializer):\n serializer.save(user_profile=self.request.user)", "def perform_create(self, serializer):\n serializer.save(user_profile = self.request.user)", "def create_person(self):", "def perform_create(self,serializer):\n serializer.save(user_profile=self.request.user)", "def perform_create(self,serializer):\n serializer.save(user_profile=self.request.user)", "def create(self, **kwargs):\n return self.save(self.new(**kwargs))", "def perform_create(self, serializer):\n serializer.save(user=self.request.user)", "def perform_create(self, serializer):\n serializer.save(user=self.request.user)", "def 
perform_create(self, serializer):\n serializer.save(user=self.request.user)", "def perform_create(self, serializer):\n serializer.save(user=self.request.user)", "def perform_create(self, serializer):\n serializer.save(user=self.request.user)", "def perform_create(self, serializer):\n serializer.save(user=self.request.user)", "def perform_create(self, serializer):\n serializer.save(user=self.request.user)", "def perform_create(self, serializer):\n serializer.save(user=self.request.user)", "def perform_create(self, serializer):\n serializer.save(user=self.request.user)", "def perform_create(self, serializer):\n serializer.save(user=self.request.user)", "def perform_create(self, serializer):\n serializer.save(user=self.request.user)", "def perform_create(self, serializer):\n serializer.save(user=self.request.user)", "def perform_create(self, serializer):\n serializer.save(user=self.request.user)", "def perform_create(self, serializer):\n serializer.save(user=self.request.user)", "async def create(self):\n assert self.backend.username_field in self\n assert \"password\" in self\n self.setdefault(\"date_joined\", now_utc())\n self.setdefault(\"is_superuser\", False)\n self.setdefault(\"is_staff\", False)\n self.setdefault(\"is_active\", True)\n self[\"id\"] = await self.backend.insert(**self)", "def perform_create(self, serializer):\n team = get_object_or_404(models.Team, pk=self.kwargs.get('pk'))\n\n return serializer.save(team=team)", "def create_schema(self, schema):\n base = '/api/storage/v1/schema'\n svc = \"%(base)s/%(prop)s\" % {'base': base, 'prop': schema['property']}\n ret = self.rclient.get(svc)\n if ret.status == restclient.Status.OK:\n LOG.warning('Property %s already exists.', schema['property'])\n return\n ret = self.rclient.post(base, schema)\n if ret.status != restclient.Status.CREATED:\n exception_msg = (_('Error Creating '\n 'Property: %(property)s '\n 'Type: %(type)s '\n 'Description: %(description)s '\n 'Return code: %(ret.status)d '\n 'Message: %(ret.data)s.')\n % {'property': schema['property'],\n 'type': schema['type'],\n 'description': schema['description'],\n 'ret.status': ret.status,\n 'ret.data': ret.data})\n LOG.error(exception_msg)\n raise exception.ShareBackendException(msg=exception_msg)", "def create(self, name, **kwargs):\n return super().create(name, **kwargs)", "def test_api_object_creating_property(self, api_object):\n api_object.status = 'CREATING'\n assert api_object.creating\n assert not api_object.partial", "def create():\n upgrade()\n populate()", "def create(self):\n self.created_date = timezone.now()\n self.save()", "def perform_create(self, serializer):\n return serializer.save(user=self.request.user)", "def post_create(self, state):\n\n self.id = self.get_flags_from_list(self.id)\n self.flags = self.get_flags_from_list(self.flags)", "def create(self, *args, **kwargs):\n pass", "def create(self, values):\n if values.get('country_id', False):\n country = self.env['res.country'].browse(values['country_id'])\n if country.code == 'SA':\n values.update({'is_saudi': True})\n else:\n values.update({'is_saudi': False})\n\n res = super(HrEmployee, self).create(values)\n if values.get('user_id', False):\n self.user_id.write({'employee_id': res})\n return res", "def create(self):\n db.session.add(self)\n db.session.commit()", "def perform_create(self, serializer): # this method runs everytime a POST method is called\n serializer.save(user_profile=self.request.user)", "def perform_create(self,serializer):\n serializer.save(user_id=self.request.user)", "def 
get_property_state(self, organization=None, **kw):\n property_details = {}\n if 'no_default_data' not in kw:\n property_details = self.get_details()\n else:\n del kw['no_default_data']\n\n property_details.update(kw)\n ps = PropertyState.objects.create(\n organization=self._get_attr('organization', self.organization),\n **property_details\n )\n # make sure to create an audit log so that we can test various methods (e.g., updating properties)\n PropertyAuditLog.objects.create(\n organization=self._get_attr('organization', self.organization),\n state=ps,\n record_type=AUDIT_IMPORT,\n name='Import Creation'\n )\n return ps", "def perform_create(self, serializer):\n user = SupportRequest.target_user(self.request)\n client = Client.objects.filter_by_user(user).get(id=int(self.get_parents_query_dict()['client']))\n if 'client' in serializer.validated_data:\n if 'civil_status' in serializer.validated_data['client']:\n client.civil_status = serializer.validated_data['client']['civil_status']\n if 'smoker' in serializer.validated_data['client']:\n client.smoker = serializer.validated_data['client']['smoker']\n if 'drinks' in serializer.validated_data['client']:\n client.drinks = serializer.validated_data['client']['drinks']\n if 'height' in serializer.validated_data['client']:\n client.height = serializer.validated_data['client']['height']\n if 'weight' in serializer.validated_data['client']:\n client.weight = serializer.validated_data['client']['weight']\n if 'daily_exercise' in serializer.validated_data['client']:\n client.daily_exercise = serializer.validated_data['client']['daily_exercise']\n\n if 'home_value' in serializer.validated_data['client']:\n client.home_value = serializer.validated_data['client']['home_value']\n if 'home_growth' in serializer.validated_data['client']:\n client.home_growth = serializer.validated_data['client']['home_growth']\n if 'ss_fra_todays' in serializer.validated_data['client']:\n client.ss_fra_todays = serializer.validated_data['client']['ss_fra_todays']\n if 'ss_fra_retirement' in serializer.validated_data['client']:\n client.ss_fra_retirement = serializer.validated_data['client']['ss_fra_retirement']\n if 'state_tax_after_credits' in serializer.validated_data['client']:\n client.state_tax_after_credits = serializer.validated_data['client']['state_tax_after_credits']\n if 'state_tax_effrate' in serializer.validated_data['client']:\n client.state_tax_effrate = serializer.validated_data['client']['state_tax_effrate']\n if 'pension_name' in serializer.validated_data['client']:\n client.pension_name = serializer.validated_data['client']['pension_name']\n if 'pension_amount' in serializer.validated_data['client']:\n client.pension_amount = serializer.validated_data['client']['pension_amount']\n if 'pension_start_date' in serializer.validated_data['client']:\n client.pension_start_date = serializer.validated_data['client']['pension_start_date']\n if 'employee_contributions_last_year' in serializer.validated_data['client']:\n client.employee_contributions_last_year = serializer.validated_data['client']['employee_contributions_last_year']\n if 'employer_contributions_last_year' in serializer.validated_data['client']:\n client.employer_contributions_last_year = serializer.validated_data['client']['employer_contributions_last_year']\n if 'total_contributions_last_year' in serializer.validated_data['client']:\n client.total_contributions_last_year = serializer.validated_data['client']['total_contributions_last_year']\n client.save()\n return 
serializer.save(client=client)", "def obj_create(self, bundle, **kwargs):\n bundle.obj = self._meta.object_class()\n\n for key, value in kwargs.items():\n setattr(bundle.obj, key, value)\n\n self.authorized_create_detail(self.get_object_list(bundle.request), bundle)\n bundle = self.full_hydrate(bundle)\n bundle.obj.user_created_id = bundle.request.user.id\n return self.save(bundle)" ]
[ "0.58959424", "0.57716787", "0.5733233", "0.5730677", "0.5730677", "0.5730677", "0.5730677", "0.5730677", "0.5730677", "0.5730677", "0.5730677", "0.5730677", "0.5730677", "0.5730677", "0.5730677", "0.5730677", "0.5730677", "0.5730677", "0.5730677", "0.5730677", "0.5730677", "0.56831616", "0.5659891", "0.5654466", "0.55987287", "0.55387574", "0.5533734", "0.5516249", "0.5515591", "0.5496158", "0.54565495", "0.54565495", "0.54565495", "0.54565495", "0.54565495", "0.54357684", "0.54357684", "0.54357684", "0.5430751", "0.54112804", "0.53789407", "0.5316191", "0.53156203", "0.53006804", "0.52932656", "0.52826995", "0.52761835", "0.5268547", "0.5259307", "0.5241911", "0.5220695", "0.52144694", "0.52144694", "0.52144694", "0.52085024", "0.5206606", "0.5191786", "0.51871103", "0.5169453", "0.5169453", "0.5169453", "0.5169453", "0.5169453", "0.5169453", "0.51570535", "0.51536447", "0.5145241", "0.5145241", "0.5132667", "0.51325977", "0.51325977", "0.51325977", "0.51325977", "0.51325977", "0.51325977", "0.51325977", "0.51325977", "0.51325977", "0.51325977", "0.51325977", "0.51325977", "0.51325977", "0.51325977", "0.51322573", "0.5131745", "0.51276475", "0.51169616", "0.51145834", "0.5102313", "0.5092083", "0.50500685", "0.50416553", "0.50324416", "0.5029771", "0.49772868", "0.4975755", "0.49736875", "0.49736178", "0.49709478", "0.4969667" ]
0.575662
2
Overrides the ORM unlink method to remove related analytic lines and block deletion when posted rent schedule entries exist.
def unlink(self): if self._context.get('is_landlord_rent'): rent_ids = [] for tenancy_rec in self: analytic_ids = self.env['account.analytic.line'].search( [('account_id', '=', tenancy_rec.id)]) if analytic_ids and analytic_ids.ids: analytic_ids.unlink() rent_ids = self.env['tenancy.rent.schedule'].search( [('tenancy_id', '=', tenancy_rec.id)]) post_rent = [x.id for x in rent_ids if x.move_check is True] if post_rent: raise Warning( _('''You cannot delete Tenancy record, if any related Rent''' '''Schedule entries are in posted.''')) else: rent_ids.unlink() return super(AccountAnalyticAccount, self).unlink()
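The unlink override above deletes the related analytic lines, refuses to proceed when any rent schedule entry is posted (move_check is True), and otherwise removes the unposted schedule rows before delegating to super(). A framework-free sketch of that guard-then-cascade shape, with hypothetical class and field names:

class PostedRecordError(Exception):
    pass


class Tenancy:
    def __init__(self, rent_schedule):
        # each entry is a dict with a "posted" flag, like move_check above
        self.rent_schedule = rent_schedule

    def unlink(self):
        if any(entry["posted"] for entry in self.rent_schedule):
            raise PostedRecordError(
                "Cannot delete tenancy: posted rent schedule entries exist.")
        self.rent_schedule.clear()  # cascade-delete the unposted children
        return True


t = Tenancy([{"posted": False}, {"posted": False}])
print(t.unlink())  # -> True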
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def unlink(self, link_id):", "def unlink(self):\n if not self:\n return True\n \n # for recomputing fields\n self.modified(self._fields)\n \n self._check_concurrency()\n \n self.check_access_rights('unlink')\n \n # Check if the records are used as default properties.\n refs = ['%s,%s' % (self._name, i) for i in self.ids]\n if self.env['ir.property'].search([('res_id', '=', False), ('value_reference', 'in', refs)]):\n raise UserError(_('Unable to delete this document because it is used as a default property'))\n \n # Delete the records' properties.\n with self.env.norecompute():\n self.env['ir.property'].search([('res_id', 'in', refs)]).unlink()\n self.delete_workflow()\n self.check_access_rule('unlink')\n \n cr = self._cr\n Data = self.env['ir.model.data'].sudo().with_context({})\n Defaults = self.env['ir.default'].sudo()\n Attachment = self.env['ir.attachment']\n \n for sub_ids in cr.split_for_in_conditions(self.ids):\n query = \"DELETE FROM %s WHERE id IN %%s\" % self._table\n cr.execute(query, (sub_ids,))\n \n # Removing the ir_model_data reference if the record being deleted\n # is a record created by xml/csv file, as these are not connected\n # with real database foreign keys, and would be dangling references.\n #\n # Note: the following steps are performed as superuser to avoid\n # access rights restrictions, and with no context to avoid possible\n # side-effects during admin calls.\n data = Data.search([('model', '=', self._name), ('res_id', 'in', sub_ids)])\n if data:\n data.unlink()\n \n # For the same reason, remove the defaults having some of the\n # records as value\n Defaults.discard_records(self.browse(sub_ids))\n \n # For the same reason, remove the relevant records in ir_attachment\n # (the search is performed with sql as the search method of\n # ir_attachment is overridden to hide attachments of deleted\n # records)\n query = 'SELECT id FROM ir_attachment WHERE res_model=%s AND res_id IN %s'\n cr.execute(query, (self._name, sub_ids))\n attachments = Attachment.browse([row[0] for row in cr.fetchall()])\n if attachments:\n attachments.unlink()\n \n # invalidate the *whole* cache, since the orm does not handle all\n # changes made in the database, like cascading delete!\n self.invalidate_cache()\n \n # recompute new-style fields\n if self.env.recompute and self._context.get('recompute', True):\n self.recompute()\n # auditing: deletions are infrequent and leave no trace in the database\n _unlink.info('User #%s deleted %s records with IDs: %r', self._uid, self._name, self.ids)\n return True", "def unlink(self, model, ids, context={}):\n try:\n res = self.object_facade.execute(self.dbname, self.user_id, self.user_passwd,\n model, 'unlink', ids, context)\n return res\n except socket.error, err:\n raise Exception(u'Conexion rechazada: %s!' % err)\n except xmlrpclib.Fault, err:\n raise Exception(u'Error %s en unlink: %s' % (err.faultCode, err.faultString))", "def unlink(self, model, ids, context={}):\n try:\n res = self.object_facade.execute(self.dbname, self.user_id, self.user_passwd,\n model, 'unlink', ids, context)\n return res\n except socket.error, err:\n raise Exception(u'Conexion rechazada: %s!' 
% err)\n except xmlrpclib.Fault, err:\n raise Exception(u'Error %s en unlink: %s' % (err.faultCode, err.faultString))", "def model_delete(self, db):\n db.session.delete(self)\n db.session.commit()", "def unlink(self):\n self._linked = False\n self.is_dirty = False\n return self", "def unlink(self, path: PathLike):", "def unlink_action(self):\n self.check_access_rights('write', raise_exception=True)\n self.filtered('binding_model_id').write({'binding_model_id': False})\n return True", "def delete(self):\n ...", "def unlink(self):\n if self.on_magento:\n sku = self.default_code\n magento = self.env['magento.backend'].search([('id', '!=', False)], limit=1, order='id DESC')\n product = Product(magento.web_url, magento.access_token, True)\n if product:\n # Delete magento product\n try:\n product.delete_magento_product(sku)\n product.delete_magento_product_all(sku)\n except Exception as e:\n 'Sh@dowWalker'\n result = super(MagentoSyncOdoo, self).unlink()\n return result", "def delete(self):\r\n db.session.delete(self)\r\n db.session.commit()", "def unlink(self):\n if self.resource is None:\n self.resource = self.client.get_resource(self.href)\n self.client.post_linked_resource(\n self.resource, RelationType.UNLINK_FROM_TEMPLATE,\n EntityType.ROLE.value, None)", "def delete_from_db(self):\n db.session.delete(self)\n db.session.commit()", "def delete_from_db(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n pass", "def delete(self):\n pass", "def delete(self):\n pass", "def delete(self):\n pass", "def delete(self):\n DATABASE_CONNECTION.delete(self.__class__.__name__, self.id)", "def delete(self):\n self.dbm().model_delete(self)", "def delete_from_db(self):\n db.session.delete(self)\n db.session.commit()\n # try:\n # db.session.delete(self)\n # db.session.commit()\n # except exc.IntegrityError:\n # db.session.rollback()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def delete(self):\n db.session.delete(self)\n db.session.commit()", "def unlink ( self, fspath ):\n return", "def delete(self):\r\n s = self.get_session()\r\n s.delete(self)\r\n s.commit()", "def delete(self):\n raise NotImplementedError", "def remove(self):\n db.session.delete(self)\n db.session.commit()", "def delete_image(self):\n Image.objects.get(id = self.id).delete()", "def delete(self):\n if Model.data_connector:\n with Model.data_connector.u_lock:\n Model.data_connector.remove_object(self)", "def delete(self, obj):", "def delete(self, using=None):\n self.model.remove_field(self)", "def delete():", "def unlink(self):\n raise ValidationError(_(\"Products may not be deleted. 
Please archive them instead.\"))", "def unlink(address):", "def delete(self, identifier):\n self.get(identifier)\n conn = self.get_connector()\n cursor = conn.cursor()\n\n query = \"delete from {0} where {2}={1}\".format(\n self.ressource_config[\"table\"],\n identifier,\n self.model.pk_field.name)\n try:\n cursor.execute(query)\n except sqlite3.IntegrityError, e:\n message = \"\"\n if \"foreign\" in e.message:\n message = \"\"\"another ressource depends on this\n object. Cloud not delete before all ressources\n depending on it are also deleted\"\"\"\n\n raise BadRequest(message)\n\n conn.commit()\n conn.close()", "def remove_link():", "def delete(self):\n raise NotImplementedError()", "def delete(self):\n raise NotImplementedError()", "def delete(self):\n\n raise NotImplementedError()", "def _delete_from_db(self):\r\n if not self._created:\r\n return\r\n\r\n tdb.del_thing(self._type_id, self._id)\r\n cache.delete(thing_prefix(self.__class__.__name__, self._id))", "def _post_delete(self, instance, **kwargs):\n pk_name = instance._meta.pk.name\n for key in self.cache_fields:\n if key in ('pk', pk_name):\n continue\n # remove pointers\n cache.delete(self._get_from_cache_key(**{key: getattr(instance, key)}))\n # remove actual object\n cache.delete(self._get_from_cache_key(**{pk_name: instance.pk}))", "def delete(self, _id):", "def delete(self):\n db.session.delete(self)\n self.__commit()", "def delete(self):\n try:\n self.s.delete(self)\n self.s.commit()\n except SQLAlchemyError:\n self.s.rollback()\n raise", "def delete(self):\n\n\n try:\n db = getDatabase()\n connection = db.connect()\n\n connection.delete(self)\n except Exception as e:\n raise e\n finally:\n db.dispose()", "async def rm_object(model, column, conn: Database, data):\n query = delete(model).where(column == data)\n return await conn.execute(query)", "def delete_self(self):\n self.table.remove((Query().name == self.name))", "def delete_self(self):\n self.table.remove((Query().name == self.name))", "def _remove_link(self, name, object_id):\n if not name in self.data:\n return\n\n if self.data[name] and object_id in self.data[name]:\n self.data[name] = self.data[name].remove(object_id)", "def _delete(self, model_obj):\n conn = self._get_session()\n db_item = None\n\n # Fetch the record from database\n try:\n identifier = getattr(model_obj, id_field(self.entity_cls).attribute_name)\n db_item = conn.query(self.model_cls).get(\n identifier\n ) # This will raise exception if object was not found\n except DatabaseError as exc:\n logger.error(f\"Database Record not found: {exc}\")\n raise\n\n if db_item is None:\n conn.rollback()\n conn.close()\n raise ObjectNotFoundError(\n {\n \"_entity\": f\"`{self.entity_cls.__name__}` object with identifier {identifier} \"\n f\"does not exist.\"\n }\n )\n\n try:\n conn.delete(db_item)\n except DatabaseError as exc:\n logger.error(f\"Error while deleting: {exc}\")\n raise\n finally:\n if not current_uow:\n conn.commit()\n conn.close()\n\n return model_obj", "def delete_model(self, request, instance):\n pass", "def _delete_from_db(self, instance: DBModelInstance) -> None:\n self.db.session.delete(instance)\n self.db.session.commit()", "def remove():\n\n db_remove()", "def delete(self):\n db.session.delete(self)\n db.session.commit()\n return self", "def otherfiles_post_delete(sender, instance, **kwargs):\n instance.url.delete(False)", "def delete(self, obj):\n raise NotImplementedError", "def remove_data(self):\n db.session.delete(self)\n db.session.commit( )", "def delete(self, model):\n 
self._isinstance(model)\n db.session.delete(model)\n db.session.commit()", "def __del__(self):\n print(f\"{self.fullname()} deleted from database.\")", "def __delete__(self, instance):\n instance.doc.pop(self.slug, None)", "def delete(self):\n\n raise NotImplementedError('Must be implemented by subclasses')", "def delete(self):\n self.manager.delete(self.name)", "def delete(self):\n self.manager.delete(self.name)", "def delete(self, *args, **kwargs):\n super(Image, self).delete(*args, **kwargs)", "def delete(self):\n self.id = uuid4()\n DataStore.remove_instance(self)", "def delete(self, obj=None):\n pass", "def delete(self, id=None, **kwargs):\r\n rm = ResourceManager()\r\n pt = self.db.auth_permission\r\n if id and not isinstance(id, (list, tuple, set)):\r\n id = [id]\r\n\r\n # removing private args\r\n if self.private_args:\r\n private_args = self.private_args.table\r\n self.private_args.delete(id)\r\n else:\r\n private_args = None\r\n\r\n # # removing many to many references\r\n # m2ms = set()\r\n # for reference in (tuple(x.split('/')) for x in imap(itemgetter('indexName'),self.many_to_many)):\r\n # resource = rm.m2m(reference)\r\n # if resource:\r\n # m2ms.add(resource.table)\r\n # resource.delete(self,collection = id)\r\n\r\n # getting table names and field names to delete\r\n cascading_deletion = tuple((field.table, field) for field in self.table._referenced_by if\r\n field.ondelete == 'CASCADE' and field.table != private_args) # and field.table not in m2ms)\r\n # deleting all related objects\r\n for table, field in cascading_deletion:\r\n res = rm.resource(table)\r\n if res:\r\n # fetch all id of related rows\r\n ids = set(chain(*self.sql(field.belongs(id), table._id, as_dict=False)))\r\n if ids:\r\n # if related entitiy is a many to many relation delete reference with other objects, but not related objects\r\n if isinstance(res, ManyToManyRelation):\r\n # making deletion simpy by forign related attribute\r\n res.delete(self, resource_id=ids)\r\n else:\r\n res.delete(id=ids, _check_permissions=False)\r\n\r\n self.db(self.table.id.belongs(id)).delete()\r\n # deleting all directly related permissions\r\n self.db((pt.table_name == self.table._tablename) & pt.record_id.belongs(id)).delete()\r\n # if realtime_enabled and self.minimal_permissions:\r\n # sync_permissions(self.table._tablename, id, self.minimal_permissions)\r\n # perms = sql(pt.record_id.belongs(id) & (pt.table_name == self.table._tablename))\r\n # if perms:\r\n # rt_sync_permissions(self.table, id, perms)\r", "def delete(self):\n with sqlite3.connect(self.dbpath) as connection: \n cursor = connection.cursor()\n DELETESQL = \"\"\"DELETE FROM accounts WHERE id=:id \"\"\"\n cursor.execute(DELETESQL, {\"id\": self.id})\n self.id = None", "def unlink_all(self):\n del self._links[:]", "def delete(self, name):\n if (self.model_dir / (str(name) + '.pkl')).exists():\n (self.model_dir / (str(name) + '.pkl')).unlink()", "def unlink(self):\n analytic_accounts_to_delete = self.env['account.analytic.account']\n for project in self:\n if project.analytic_account_id and not project.analytic_account_id.line_ids:\n analytic_accounts_to_delete |= project.analytic_account_id\n result = super(Project, self).unlink()\n analytic_accounts_to_delete.unlink()\n return result", "def _delete(self, model_obj):\n conn = self._get_session()\n\n try:\n model_obj.delete(\n index=self.model_cls._index._name,\n using=conn,\n refresh=True,\n )\n except Exception as exc:\n logger.error(f\"Error while creating: {exc}\")\n raise\n\n return model_obj", "def 
delete(self, *args, **kwargs):\n self.delete_relatives()\n old_content = self.content\n super().delete(*args, **kwargs)\n if old_content.isOrphaned():\n old_content.delete()", "def delete(self, name):\n\n pass", "def delete(self)->None:\n database.cursor.execute(\n \"DELETE FROM {} WHERE id={}\".format(self.table_name, self.id))\n database.connection.commit()", "def delete(self):\n db.session.delete(self)\n try:\n db.session.commit()\n return True\n except Exception as error:\n db.session.rollback()\n print(error.args)\n return False", "def destroy(self):\n db.session.delete(self)\n db.session.commit()\n return True", "def __delete__(self, instance):\n self.session.close()", "def force_delete(self):\n self.manager.force_delete(self)", "def force_delete(self):\n self.manager.force_delete(self)", "def _delete (self):\n self._exec ('delete from table_name where id=%(id)s')", "def delete_link(db_object, text=None):\n if text is None:\n text = 'delete'\n return _make_link(db_object.delete_url(), text)", "def clear_relation(self, relation_name):\n try:\n conn = psycopg2.connect(\"dbname='{0}'\".format(DATABASE))\n cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)\n cur.execute(sql.SQL(\"DELETE FROM {}\").format(sql.Identifier(relation_name)))\n cur.execute(\"ALTER SEQUENCE {0}_index_seq RESTART WITH 1;\".format(relation_name))\n conn.commit()\n cur.close()\n except Exception as e:\n print(e)", "def __del__(self):\r\n self.save()\r\n self.close()", "def delete(self, *args, **kwargs):\n pass", "def delete(self, *args, **kwargs):\n pass", "def __delete__(self):\n pass", "def spatialitedbs_post_delete(sender, instance, **kwargs):\n instance.url.delete(False)", "def del_object_from_parent(self):\n if self.parent:\n self.parent.objects.pop(self.ref)", "def abstract_delete(self, model, id):\n record = self.abstract_get(model, id)\n rec_dict = self.dict_json(record)\n\n record.unlink()\n\n return rec_dict", "def delete_path():\n #TODO delete path from database\n pass", "def delete(self, obj):\n self.session.delete(obj)", "def delete(self)->None:\n database.cursor.execute(\n \"DELETE FROM {} WHERE id = %s\".format(self.table_name), (self.id))\n database.connection.commit()" ]
[ "0.75455254", "0.72018594", "0.7119035", "0.71088475", "0.700685", "0.70016056", "0.683571", "0.66466945", "0.6589308", "0.65799296", "0.65786785", "0.6519045", "0.64650804", "0.64650804", "0.64448905", "0.64448905", "0.64448905", "0.64448905", "0.64410865", "0.64339375", "0.6402628", "0.639001", "0.639001", "0.639001", "0.639001", "0.639001", "0.639001", "0.639001", "0.639001", "0.639001", "0.6366735", "0.6351409", "0.63455814", "0.6345394", "0.63299984", "0.63278383", "0.63203406", "0.6286665", "0.6269671", "0.62615895", "0.62407285", "0.622949", "0.6220662", "0.6212009", "0.6212009", "0.6210694", "0.61860645", "0.61824775", "0.618226", "0.61799693", "0.61700904", "0.615321", "0.6143109", "0.6135961", "0.6135961", "0.6135845", "0.6118409", "0.6116177", "0.6086657", "0.60574883", "0.60566366", "0.6029915", "0.6028449", "0.60261434", "0.60247827", "0.602148", "0.5997119", "0.59778106", "0.5971066", "0.5971066", "0.59706825", "0.59632623", "0.59514534", "0.5950753", "0.5948068", "0.59370613", "0.5926029", "0.5923629", "0.59136677", "0.5911819", "0.59026706", "0.5889989", "0.5884489", "0.5871072", "0.58704734", "0.58681566", "0.58681566", "0.5863814", "0.5858099", "0.5853636", "0.5847643", "0.5838637", "0.5838637", "0.58381724", "0.58354604", "0.58352184", "0.58263236", "0.5825364", "0.58247596", "0.58181244" ]
0.6050353
61
This button method is used to change the tenancy state to Open.
def landlord_button_start(self): if self._context.get('is_landlord_rent'): self.write({'state': 'open', 'rent_entry_chck': False})
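landlord_button_start above is a one-line guarded state transition: it only fires when the is_landlord_rent context flag is set. The same shape in plain Python; the context dict and field names mirror the record but are stand-ins, not Odoo's environment:

class Tenancy:
    def __init__(self, context=None):
        self._context = context or {}
        self.state = "draft"
        self.rent_entry_chck = True

    def landlord_button_start(self):
        # no-op unless the view passed the landlord-rent context flag
        if self._context.get("is_landlord_rent"):
            self.state = "open"
            self.rent_entry_chck = False


t = Tenancy({"is_landlord_rent": True})
t.landlord_button_start()
print(t.state)  # -> open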
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def toggle(self):\n self.open = not self.open", "def switch_state():\n\tDmg.OpenWindow()", "def open_restaurant(self):\n print(\"We're Open!\")", "def open_restaurant(self):\r\n print(\"The restaurant is open now \")", "def open_restaurant(self):\n\t\tprint(\"The restaurant is now open!\")", "def open_restaurant(self):\n print(f\"{self.restaurant_name} is now open!\")", "def open_restaurant(self):\n print(f\"{self.restaurant_name} is now open!\")", "def open_restaurant(self):\n print(f\"{self.restaurant_name} is now open!\")", "def open_restaurant(self):\n print(self.name.title() + \" is now open!\")", "def open_restaurant(self):\n print(self.name.title() + \" is now open!\")", "def open_restaurant(self):\n print(self.name.title() + \" is now open!\")", "def open_restaurant(self):\n print(self.name.title() + \" is now open!\")", "def open_restaurant(self):\n\t\tprint(f\"The {self.restaurant_name} is open now.\")", "def open_restaurant(self):\n\t\topen = f\"{self.restaurant_name} is now open.\"\n\t\tprint(f\"\\n{open}\")", "def open_restaurant(self):\n print(f\"\\nThe resturant {self.restaurant_name} is now open!\")", "def open(self):\n self.solenoid.set(self.OPEN)", "def open_restaurant(self):\n\t\tprint(\"restaurant is open\")", "def activate_button(self, e):\n self.serv_but.config(state=\"normal\")", "def open_restaurant(self):\r\n\t\tprint(self.restaurant_name.title() + \" is open\")", "def open_restaurant(self):\n msg = f\"{self.name} is open. Come on in!\"\n print(f\"\\n{msg}\")", "def open_restaurant(self):\n\t\tprint(f\"The restaurant is open.\")", "def on_stateico_clicked(self, *a):\n\t\tself.window1.set_property('visible', True)\n\t\tself.stateico.set_visible(False)\n\t\tself.window1.present()", "def enter(self):\n LOGGER.debug(\"State 'open' entered\")", "def tool_open_clicked(self, widget, data=None):\n self.open_chooser.show()", "def open_restaurant(self):\n msg = self.name + \" is open. 
Come on in!\"\n print(\"\\n\" + msg)", "def open(self):\n self.state = True\n self.mainwindow.sendMessage('a')\n print(\"opening \" + self.name)", "def gt_helper_clicked(self):\n if not self.gt_helper_open:\n self.gt_helper_open = True\n self.gt_helper.show()", "def open_restaurant(self):\n print(f'The Restaurant {self.restaurant_name} is opened...')", "def open(self):\n self._isOpen = True", "def toggle(self):", "def onClick(self):\n self.app.setActiveMode(\"start\")", "def toggle(self) -> None:", "def toggle(self) -> None:", "def switch_open_irf(self):\n self.ui.actionOpen_IRF_File.setEnabled(self.ui.separate_irf_checkBox.isChecked())", "def toggle(self) -> None:\n ...", "def toggle_valve():\n new_status = not tank_valve_open\n print(\"- Toggling valve status to '{}'.\".format(\"Open\" if new_status\n else \"Closed\"))\n set_valve_open(new_status)", "def activate(self):\n self.active = True", "def activate(self):\n self.active = True", "def _activate(self):\n self.active = True", "def open(self):\n self._data['open_drawer'] = True", "def activate(self):\n self._is_active = True", "def _isopen(self):\n return self.dp.state()==PyTango.DevState.OPEN", "def ToggleApprovalTracker(self, event):\n pass", "def open(self):\n\n self.st_time = time.strftime('%H:%M %A %d %B')\n self.is_active = True", "async def async_turn_on(self, **kwargs: Any) -> None:\n await self.call_state_change(\"open\")", "def act(self, state):\n return", "def open(self, wait=True):\n self.gripper.actuate(1, 1)", "def open(self):\n\n return self._state == states['open']", "def onOpen(self):", "def activate(self, state, action):\n self.state = state\n return Explorer.activate(self, state, action)", "def activate(self, state, action):\n self.state = state\n return Explorer.activate(self, state, action)", "def open(self):\n \n return self.set_level('up')", "def is_open(self):\n return self.name == \"open\"", "def downstreamToolClicked(self):\n self.openDock()\n self.downstream_tree_tool.setActive()", "def open_door(self):\n\t\tself.set_button_light(self.currentFloor, OUTPUT.IN_LIGHTS, 0)\n\t\tio.set_bit(OUTPUT.DOOR_OPEN, 1)\n\t\tself.doorTimer.start()", "def open(self):\n self._command = \"open\"", "def opened(self):\n self.emit('opened')", "def file_menu_open_activate(self, widget, data=None):\n self.open_chooser.show()", "def openTB1Settings(self):\n self.TB1_Window = QtWidgets.QDialog()\n self.TB1_ui = Ui_robotOneConfig()\n self.TB1_ui.setupUi(self.TB1_Window)\n self.TB1_Window.show()", "def open_changeset(self):\n pass", "def _clicked_yes_button(self):\n self.yes = True", "def __on_click(self):\n if self.enable:\n self.__function_to_activate()", "def open_restaurant(self):\n print(\"O Restaurante esta aberto\")", "def toggle(self):\n self._state.is_on = not self._state.is_on\n self.send_command(Command.TOGGLE, [])", "def OnButtonClick(self):\n self.choice()", "def on_rcrnode_open_btn_clicked(self):\n # self.rcrnode.init_node()\n self.rcrnode.resume()", "def on_tog_small_method(self, tog_small_class):\n self.txt_small_method.set_sensitive(tog_small_class.get_active())\n self.txt_small_method.set_text(\"0\")", "def __minimize_on_click(self):\n self.close()", "def set_active(self):\n self.active = True", "def set_state( self ):", "def set_open(self, player):\n\t\t#TODO: open and give the player the item contained.\n\t\tself.open = True\n\t\tself.default_image = self.open_chest_image #TODO: set open chest image\n\t\tself.image = self.open_chest_image\n\t\tif not self.contents_data: return # case for empty chest\n\t\titem = 
self.build_chest_item()\n\t\tkey = self.contents_data[ ITEM_KEY ]\n\t\tplayer.acquire_item( item, key )", "def toggled(self, *args, **kwargs): # real signature unknown\n pass", "def toggle(self, **kwargs):\n self.on = False if self.on else True", "def upstreamToolClicked(self):\n self.openDock()\n self.upstream_tree_tool.setActive()", "def is_open(self):\n return self.status == \"O\"", "def setOpenFiles(self):\n self.openFilesButton.setChecked(True)", "def click(self):\r\n pass", "def select_action(self, state):", "def toggle(self):\n if bool(self.show.get()):\n self.sub_frame.pack(fill=\"x\", expand=1)\n self.toggle_button.configure(text='-')\n else:\n self.sub_frame.forget()\n self.toggle_button.configure(text='+')", "async def legsessionopen(self, ctx):\n\n new_value = await self.toggle_dm_setting(ctx.author.id, \"leg_session_open\")\n\n if new_value:\n message = f\":white_check_mark: You will now receive DMs when you \" \\\n f\"are a {self.bot.mk.LEGISLATURE_LEGISLATOR_NAME} \" \\\n f\"and a new Legislative Session is opened.\"\n else:\n message = f\":white_check_mark: You will no longer receive DMs when you are \" \\\n f\"a {self.bot.mk.LEGISLATURE_LEGISLATOR_NAME} \" \\\n f\"and a new Legislative Session is opened.\"\n\n await ctx.send(message)", "def open_restaurant(self):\n return \"Restaurante esta aberto\"", "def is_open_for_betting(self):\n return self.is_open", "def openTB2Settings(self):\n self.TB2_Window = QtWidgets.QDialog()\n self.TB2_ui = Ui_robotTwoConfig()\n self.TB2_ui.setupUi(self.TB2_Window)\n self.TB2_Window.show()", "def onShowed(self):\n self.parent.actionTagTwo=\"\"\n pass", "def _openButton(self):\n #get the specified file\n selected_file = self.view.list.getSelected()\n\n if selected_file:\n self.model.open(selected_file)\n return\n\n #prompt if they really want to open maya\n dialogs = Dialogs(self.view)\n\n msg = 'No file selected!'\n msg += '\\n\\nAre you sure you want to open maya without a file?'\n dialogs.confirmPrompt(msg)\n\n self.model.open()", "def landlord_button_close(self):\n return self.write({'state': 'close'})", "def trigger_open(self):\n self.get_selected()\n if self.selected_item:\n self.controller.display_item(self.selected_item)", "def btn_create_reco(self):\n\t\tprint()\n\t\tprint('OH - btn_create_reco')\n\n\t\t# Init\n\t\tres_id = self.id\n\t\tres_model = _model_treatment\n\t\tview_id = self.env.ref('openhealth.treatment_2_form_view').id\n\n\t\t# Open\n\t\treturn {\n\t\t\t# Mandatory\n\t\t\t'type': _model_action,\n\t\t\t'name': 'Open Treatment Current',\n\t\t\t# Window action\n\t\t\t'priority': 1,\n\t\t\t'res_id': res_id,\n\t\t\t'res_model': res_model,\n\t\t\t#'view_id': view_id,\n\t\t\t# Views\n\t\t\t#\"views\": [[False, \"form\"]],\n\t\t\t\"views\": [[view_id, \"form\"]],\n\t\t\t'view_mode': 'form',\n\t\t\t'target': 'current',\n\t\t\t#\"domain\": [[\"patient\", \"=\", self.patient.name]],\n\t\t\t#'auto_search': False,\n\t\t\t'flags': {\n\t\t\t\t\t\t#'form': {'action_buttons': True, 'options': {'mode': 'edit'}}\n\t\t\t\t\t\t'form': {'action_buttons': False, }\n\t\t\t\t\t},\n\t\t\t'context': {\n\t\t\t\t\t\t#'default_treatment': treatment_id,\n\t\t\t\t\t}\n\t\t}", "def Open(self):\n self._is_open = True\n def closure(pane):\n if not pane.IsShown():\n pane.Show(True)\n self._PaneInfoOperation(closure)", "def _onOpen(self, event):\n self.openExperiment()", "def activate(self):\n pass", "def openTB3Settings(self):\n self.TB3_Window = QtWidgets.QDialog()\n self.TB3_ui = Ui_robotThreeConfig()\n self.TB3_ui.setupUi(self.TB3_Window)\n 
self.TB3_Window.show()", "def on_tog_god_class(self, tog_god_class):\n self.txt_god_class.set_sensitive(tog_god_class.get_active())\n self.txt_god_class.set_text(\"0\")", "def activated(self):", "def on_activate(self):", "def toggle(self):\n self._show = not self._show\n if self._show:\n self._sub_frame.pack(fill=tk.X, expand=1)\n self._toggle_button.configure(text='-')\n else:\n self._sub_frame.forget()\n self._toggle_button.configure(text='+')", "def cf_statechange(self, button, new_state):\n cfg_id = button.get_label()\n self.cf_dat.cf_statechange(cfg_id, new_state)\n cascade = self.cf_dat.config_dict[cfg_id]['cascade_list']\n if new_state == True:\n for opt in self.cf_dat.opt_dict.keys():\n self.opt_lw.button_dict[opt].set_state(False)\n\n for opt, imp in cascade:\n self.cf_dat.set_imp(opt, imp)\n self.opt_lw.button_dict[opt].set_state(True)\n\n for opt in self.cf_dat.opt_dict.keys():\n if self.cf_dat.opt_dict[opt]['permanent'] == True:\n self.opt_lw.button_dict[opt].set_state(True)\n\n if self.opt_lw.button_dict[opt].get_state() == False:\n self.cf_dat.set_imp(opt, 'none')\n self.opt_lw.button_dict[opt].set_label('{0}:none'.format(opt))\n\n return", "def on_activate(self) -> None:", "def on_okButton_clicked(self):\n self.accept=True", "def help_menu_about_activate(self, widget, data=None):\n print \"cmon\"\n button1 = gtk.Button(\"Press Me!\")\n self.fixed1.put(button1, 0, 0)\n button1.window.raise_()\n button1.show()\n button2 = gtk.Button(\"Prease Press Me!\")\n self.fixed1.put(button2, 380, 380)\n button2.show()\n button2.window.raise_()" ]
[ "0.6753454", "0.67351896", "0.6623262", "0.65252876", "0.63716906", "0.6349264", "0.6349264", "0.6349264", "0.63400394", "0.63400394", "0.63400394", "0.63400394", "0.6328688", "0.6301924", "0.628958", "0.6283317", "0.6266414", "0.6244455", "0.62006384", "0.619092", "0.6183897", "0.6144216", "0.6143789", "0.61283827", "0.6125809", "0.6118666", "0.61074406", "0.60479224", "0.60477394", "0.6003502", "0.5991883", "0.5965476", "0.5965476", "0.59560335", "0.5946765", "0.58614606", "0.5846774", "0.5846774", "0.58355016", "0.5825197", "0.5794247", "0.5776643", "0.57600343", "0.5751785", "0.57286584", "0.5717032", "0.5716945", "0.5706793", "0.5690198", "0.56869984", "0.56869984", "0.5675335", "0.5649951", "0.56370384", "0.5621918", "0.56138194", "0.56099015", "0.5609427", "0.55916715", "0.55882525", "0.5573707", "0.5564301", "0.5556609", "0.5556123", "0.5538465", "0.5533092", "0.5528007", "0.55276614", "0.5520511", "0.55106515", "0.5505057", "0.5494549", "0.54943746", "0.54787916", "0.54760116", "0.54729223", "0.5463459", "0.5453931", "0.5449415", "0.54428065", "0.54212517", "0.54173476", "0.54132223", "0.54128116", "0.5410521", "0.5404362", "0.53886336", "0.53860277", "0.53840166", "0.53835475", "0.53760743", "0.5373795", "0.53721833", "0.5371613", "0.53624886", "0.53595126", "0.5354927", "0.53520113", "0.5344084", "0.53431815" ]
0.6233518
18
This button method is used to open the related account payment form view.
def landlord_button_deposite_pay(self): payment_id = False acc_pay_form = self.env.ref( 'account.view_account_payment_form') account_jrnl_obj = self.env['account.journal'].search( [('type', '=', 'purchase')], limit=1) payment_obj = self.env['account.payment'] payment_method_id = self.env.ref( 'account.account_payment_method_manual_in') for tenancy_rec in self: if tenancy_rec.acc_pay_dep_rec_id and \ tenancy_rec.acc_pay_dep_rec_id.id: return { 'view_type': 'form', 'view_id': acc_pay_form.id, 'view_mode': 'form', 'res_model': 'account.payment', 'res_id': tenancy_rec.acc_pay_dep_rec_id.id, 'type': 'ir.actions.act_window', 'target': 'current', 'context': self._context, } if tenancy_rec.deposit == 0.00: raise Warning(_('Please Enter Deposit amount.')) if tenancy_rec.deposit < 0.00: raise Warning( _('The deposit amount must be strictly positive.')) vals = { 'partner_id': tenancy_rec.property_owner_id.parent_id.id, 'partner_type': 'supplier', 'journal_id': account_jrnl_obj.id, 'payment_type': 'outbound', 'communication': 'Deposit Received', 'tenancy_id': tenancy_rec.id, 'amount': tenancy_rec.deposit, 'property_id': tenancy_rec.property_id.id, 'payment_method_id': payment_method_id.id } payment_id = payment_obj.create(vals) return { 'view_mode': 'form', 'view_id': acc_pay_form.id, 'view_type': 'form', 'res_id': payment_id and payment_id.id, 'res_model': 'account.payment', 'type': 'ir.actions.act_window', 'nodestroy': True, 'target': 'current', 'domain': '[]', 'context': { 'close_after_process': True, } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def open_invoice(self, cr, uid, ids, context=None):\n if context is None:\n context = {}\n mod_obj = self.pool.get('ir.model.data')\n for advance_pay in self.browse(cr, uid, ids, context=context):\n form_res = mod_obj.get_object_reference(cr, uid, 'account', 'invoice_supplier_form')\n form_id = form_res and form_res[1] or False\n tree_res = mod_obj.get_object_reference(cr, uid, 'account', 'invoice_tree')\n tree_id = tree_res and tree_res[1] or False\n\n return {\n 'name': _('Advance Invoice'),\n 'view_type': 'form',\n 'view_mode': 'form,tree',\n 'res_model': 'account.invoice',\n 'res_id': int(context['invoice_id'][0]),\n 'view_id': False,\n 'views': [(form_id, 'form'), (tree_id, 'tree')],\n 'context': context,\n 'type': 'ir.actions.act_window',\n }", "def checkout_btn(self):\n self._checkout_btn.click()", "def click_submit_payment_button(self):\n self.click(self.submit_payment_locator)\n time.sleep(2)", "def on_OpenExplorerAccount_clicked(self):\n # TODO: not implemented yet\n #raise NotImplementedError\n url = f\"http://kfc.matrix.io/{self.a0_Address}\"\n\n self.browser.openurl(url)\n self.OnlyDisplay(f\"start {url}\")", "def open_accounts_page(self):\n log.info(\"In landing page: click bill view button\")\n bills_page_for_meters_link = self.driver.find_element(\n *self.link_to_accs_locator\n )\n bills_page_for_meters_link.click()\n self.driver.sleep(5)\n self.driver.switch_to.window(self.driver.window_handles[-1])", "def on_show_opc_relation(self):\n from OPCRelationDialog import QOPCRelationDialog\n\n dlg = QOPCRelationDialog(self)\n dlg.exec_()", "def proceed(request):\n if request.user.is_authenticated():\n return submit(request)\n agreement_form = forms.DevAgreementForm({'read_dev_agreement': True},\n instance=None, request=request)\n return render(request, 'submit/terms.html',\n {'step': 'terms', 'agreement_form': agreement_form,\n 'proceed': True})", "def on_OpenExplorerWallet_clicked(self):\n # TODO: not implemented yet\n #raise NotImplementedError\n url = \"http://wallet.matrix.io\"\n\n self.browser.openurl(url)\n self.OnlyDisplay(f\"start {url}\")", "def open_create_partner(self, cr, uid, ids, context=None):\n view_obj = self.pool.get('ir.ui.view')\n view_id = view_obj.search(cr, uid, [('model', '=', self._name), \\\n ('name', '=', self._name+'.view')])\n return {\n 'view_mode': 'form',\n 'view_type': 'form',\n 'view_id': view_id or False,\n 'res_model': self._name,\n 'context': context,\n 'type': 'ir.actions.act_window',\n 'target': 'new',\n }", "def on_OpenWallet_clicked(self):\n # TODO: not implemented yet\n raise NotImplementedError", "def visionActivada(\n self,\n ):\n Ventana_Formulario(self.root_controler)", "def click_request_new_deal_button(self):\n self.click_element(self.request_new_deal_button_locator)", "def payment(self, **post):\n cr, uid, context = request.cr, request.uid, request.context\n payment_obj = request.registry.get('payment.acquirer')\n sale_order_obj = request.registry.get('sale.order')\n\n order = request.website.sale_get_order(context=context)\n order.write({'usersess': request.session['webcalc_session_id']})\n #order.env.cr.commit()\n redirection = self.checkout_redirection(order)\n if redirection:\n return redirection\n\n shipping_partner_id = False\n if order:\n if order.partner_shipping_id.id:\n shipping_partner_id = order.partner_shipping_id.id\n else:\n shipping_partner_id = order.partner_invoice_id.id\n\n values = {\n 'order': request.registry['sale.order'].browse(cr, SUPERUSER_ID, order.id, context=context),\n 'usersess': 
request.session['webcalc_session_id']\n }\n values['errors'] = sale_order_obj._get_errors(cr, uid, order, context=context)\n values.update(sale_order_obj._get_website_data(cr, uid, order, context))\n\n if not values['errors']:\n acquirer_ids = payment_obj.search(cr, SUPERUSER_ID, [('website_published', '=', True), ('company_id', '=', order.company_id.id)], context=context)\n values['acquirers'] = list(payment_obj.browse(cr, uid, acquirer_ids, context=context))\n render_ctx = dict(context, submit_class='btn btn-primary', submit_txt=_('Завершить оформление'))\n for acquirer in values['acquirers']:\n acquirer.button = payment_obj.render(\n cr, SUPERUSER_ID, acquirer.id,\n '/',\n order.amount_total,\n order.pricelist_id.currency_id.id,\n partner_id=shipping_partner_id,\n tx_values={\n 'return_url': '/shop/payment/validate',\n },\n context=render_ctx)\n #vips_shop\n return request.website.render(\"vips_shop.payment\", values)", "def get_view():\n\n gateway = DS_CONFIG[\"gateway_account_id\"]\n gateway_ok = gateway and len(gateway) > 25\n\n return render_template(\n \"eg014_collect_payment.html\",\n title=\"Order form with payment\",\n source_file=path.basename(path.dirname(__file__)) + \"/controller.py\",\n source_url=DS_CONFIG[\"github_example_url\"] + path.basename(path.dirname(__file__)) + \"/controller.py\",\n documentation=DS_CONFIG[\"documentation\"] + eg,\n show_doc=DS_CONFIG[\"documentation\"],\n signer_name=DS_CONFIG[\"signer_name\"],\n signer_email=DS_CONFIG[\"signer_email\"],\n gateway_ok=gateway_ok\n )", "def btn_create_reco(self):\n\t\tprint()\n\t\tprint('OH - btn_create_reco')\n\n\t\t# Init\n\t\tres_id = self.id\n\t\tres_model = _model_treatment\n\t\tview_id = self.env.ref('openhealth.treatment_2_form_view').id\n\n\t\t# Open\n\t\treturn {\n\t\t\t# Mandatory\n\t\t\t'type': _model_action,\n\t\t\t'name': 'Open Treatment Current',\n\t\t\t# Window action\n\t\t\t'priority': 1,\n\t\t\t'res_id': res_id,\n\t\t\t'res_model': res_model,\n\t\t\t#'view_id': view_id,\n\t\t\t# Views\n\t\t\t#\"views\": [[False, \"form\"]],\n\t\t\t\"views\": [[view_id, \"form\"]],\n\t\t\t'view_mode': 'form',\n\t\t\t'target': 'current',\n\t\t\t#\"domain\": [[\"patient\", \"=\", self.patient.name]],\n\t\t\t#'auto_search': False,\n\t\t\t'flags': {\n\t\t\t\t\t\t#'form': {'action_buttons': True, 'options': {'mode': 'edit'}}\n\t\t\t\t\t\t'form': {'action_buttons': False, }\n\t\t\t\t\t},\n\t\t\t'context': {\n\t\t\t\t\t\t#'default_treatment': treatment_id,\n\t\t\t\t\t}\n\t\t}", "def click_edit_target_buy_policy_button(self):\n self.click_element(self.edit_target_buy_policy_button_locator)", "def view_account(request, recurring_payment_id, guid=None,\n template_name=\"recurring_payments/index.html\"):\n rp = get_object_or_404(RecurringPayment, pk=recurring_payment_id)\n\n # only admin or user self can access this page\n if not (request.user.is_authenticated() and\n (request.user.profile.is_superuser\n or request.user.id == rp.user.id) or rp.guid == guid):\n raise Http403\n\n paid_payment_transactions = PaymentTransaction.objects.filter(\n recurring_payment=rp,\n status=True\n )\n if paid_payment_transactions:\n last_paid_payment_transaction = paid_payment_transactions[0]\n else:\n last_paid_payment_transaction = None\n\n failed_payment_transactions = PaymentTransaction.objects.filter(\n recurring_payment=rp,\n status=False\n )\n if failed_payment_transactions:\n last_failed_payment_transaction = failed_payment_transactions[0]\n else:\n last_failed_payment_transaction = None\n\n display_failed_transaction = False\n if 
last_failed_payment_transaction:\n if not last_paid_payment_transaction or \\\n last_failed_payment_transaction.create_dt \\\n > last_paid_payment_transaction.create_dt:\n display_failed_transaction = True\n\n if not rp.trial_amount:\n rp.trial_amount = 0\n\n # rp_invoices\n rp_invoices = RecurringPaymentInvoice.objects.filter(\n recurring_payment=rp\n ).order_by('-billing_cycle_start_dt')\n\n # payment transactions\n payment_transactions = PaymentTransaction.objects.filter(\n recurring_payment=rp\n ).order_by('-create_dt')\n\n # get ready for the add/update payment method button\n test_mode = get_test_mode()\n is_active = (rp.status_detail == 'active')\n if is_active:\n #rp.populate_payment_profile()\n payment_profiles = PaymentProfile.objects.filter(\n customer_profile_id=rp.customer_profile_id,\n status=True, status_detail='active')\n if payment_profiles:\n payment_profile = payment_profiles[0]\n else:\n payment_profile = None\n\n else:\n payment_profile = None\n\n is_owner = request.user.id == rp.user.id\n\n num_accounts = RecurringPayment.objects.filter(user=rp.user).count()\n\n return render_to_response(template_name, {\n 'rp': rp,\n 'display_failed_transaction': display_failed_transaction,\n 'last_paid_payment_transaction': last_paid_payment_transaction,\n 'last_failed_payment_transaction': last_failed_payment_transaction,\n 'rp_invoices': rp_invoices,\n 'payment_transactions': payment_transactions,\n 'payment_profile': payment_profile,\n 'test_mode': test_mode,\n 'is_active': is_active,\n 'is_owner': is_owner,\n 'num_accounts': num_accounts,\n 'memberships': rp.memberships,\n 'STRIPE_PUBLISHABLE_KEY': getattr(settings, 'STRIPE_PUBLISHABLE_KEY', '')\n },\n context_instance=RequestContext(request))", "def click_reference_rates_show_search_form_link(self):\n self.click_element(self.reference_rates_show_search_form_link_locator)", "def select_account(self, account_id: str):\n account_number_box = self.driver.find_or_raise(self.AccountNumberBoxSelector)\n account_number_box.send_keys(account_id)\n account_number_box.send_keys(Keys.ENTER)\n self.driver.sleep(2)\n self.driver.find_or_raise('//button[.=\"View\"]', xpath=True).click()\n self.driver.sleep(2)", "def form_valid(self, form):\n auth_login(self.request, form.get_user())\n if self.request.session.get('payment'):\n Payment.objects.filter(id=self.request.session['payment']).update(\n user_id=self.request.user.revolvuserprofile, entrant_id=self.request.user.revolvuserprofile)\n payment = Payment.objects.get(id=self.request.session['payment'])\n Tip.objects.filter(id=payment.tip_id).update(user_id=self.request.user.revolvuserprofile)\n Project.objects.get(id=payment.project_id).donors.add(self.request.user.revolvuserprofile)\n AnonymousUserDonation.objects.filter(payment_id=self.request.session['payment']).delete()\n del self.request.session['payment']\n\n # messages.success(self.request, 'Logged in as ' + self.request.POST.get('username'))\n # return redirect(reverse('project:view', kwargs={'title':title})+'?amount='+amount+'&tip='+tip)\n messages.success(self.request, 'Logged in as ' + self.request.POST.get('username'))\n return redirect(self.next_url)", "def on_CurrentradioButton_clicked(self):\n # TODO: not implemented yet\n # raise NotImplementedError\n print(\"Select current cash deposit. 
If you select it, cash will be released after 7 days\")\n self.select_actor = \"Month0\"", "def hit_send_payment_button(self):\n\t\telement = Element(driver=self.driver,\n\t\t explicit_wait_time=self.explicit_wait_time,\n\t\t locator=BillPayPageLocator.SEND_PAYMENT_BUTTON)\n\t\telement.click_on()\n\t\treturn None", "def contribute_by_payment_mode(request, campaign_id, payment_mode, template='campaign/campaign_contribution_form_%s.html'):\r\n campaign = get_object_or_404(Campaign.objects.active(), pk=campaign_id)\r\n payment_option = campaign.artist.get_merchant_account(payment_mode)\r\n if not payment_option:\r\n raise Http404\r\n template = template % payment_option.payment_mode\r\n ContribForm = getattr(forms, '%sContributionForm' % payment_option.payment_mode.title())\r\n ctx = {'campaign':campaign, 'c':campaign, 'payment_option':payment_option}\r\n proceed_to_pay = False\r\n if campaign.is_free:\r\n return HttpResponseRedirect(reverse('contribute_to_campaign', kwargs={'campaign_id':campaign_id}))\r\n try:\r\n if request.POST:\r\n form = ContribForm(campaign=campaign, user_profile=request.user.get_profile(), data=request.POST)\r\n if form.is_valid():\r\n pending_contrib = form.save(commit=True)\r\n _log.info('Pending %s contribution recorded: %s', payment_option.payment_mode_name, pending_contrib)\r\n proceed_to_pay = True\r\n str_list = [str(k) for k in (campaign.pk, int(time()), pending_contrib.pk, request.user.pk)]\r\n pending_contrib.invoice_num = ''.join(str_list)\r\n ctx['contrib'] = pending_contrib\r\n else:\r\n form = ContribForm(campaign=campaign, user_profile=request.user.get_profile())\r\n ctx.update({'form':form, 'proceed_to_pay':proceed_to_pay})\r\n except CampaignError, e:\r\n request.user.message_set.create(message=e.message)\r\n return HttpResponseRedirect(reverse('view_campaign', kwargs={'campaign_id':campaign.pk}))\r\n return render_view(request, template, ctx)", "def action_view_invoice_salon(self):\n return {\n 'name': 'Invoices',\n 'domain': [('invoice_origin', '=', self.name)],\n 'res_model': 'account.move',\n 'view_id': False,\n 'view_mode': 'tree,form',\n 'type': 'ir.actions.act_window',\n }", "def payment_method_user(self) -> \"CheckoutPage\":\n self.accordion = PaymentMethodUser(self.driver)\n self.accordion.btn_agree.click()\n self.accordion.btn_continue.click()\n return self", "def anon_contribute_by_payment_mode(request, campaign_id, payment_mode, template='campaign/campaign_contribution_form_%s_anon.html'):\r\n campaign = get_object_or_404(Campaign.objects.active(), pk=campaign_id)\r\n payment_option = campaign.artist.get_merchant_account(payment_mode)\r\n if not payment_option:\r\n raise Http404\r\n template = template % payment_option.payment_mode\r\n ctx = {'campaign':campaign, 'c':campaign, 'payment_option':payment_option, 'is_anon':True, 'proceed_to_pay':True}\r\n if campaign.is_free:\r\n return HttpResponseRedirect(reverse('contribute_to_campaign', kwargs={'campaign_id':campaign_id}))\r\n return render_view(request, template, ctx)", "def on12Lead(self, event): # wxGlade: DAQPanel.<event_handler>\n CreateDialog2 = Lead12Dialog2(self,self)\n CreateDialog2.ShowModal()", "def _openButton(self):\n #get the specified file\n selected_file = self.view.list.getSelected()\n\n if selected_file:\n self.model.open(selected_file)\n return\n\n #prompt if they really want to open maya\n dialogs = Dialogs(self.view)\n\n msg = 'No file selected!'\n msg += '\\n\\nAre you sure you want to open maya without a file?'\n dialogs.confirmPrompt(msg)\n\n self.model.open()", 
"def proceed_to_checkout_and_payment(self):\r\n # 1- summary\r\n logger.info('starting wizard with summary')\r\n self.automation.wait.until(\r\n EC.presence_of_element_located((By.CSS_SELECTOR, '.cart_navigation a.standard-checkout')))\r\n self.automation.driver.execute_script(\"document.querySelectorAll('.cart_navigation a.standard-checkout')[0]\"\r\n \".click()\")\r\n\r\n # 2-sign in & 3-address\r\n logger.info('2-sign in & 3-address')\r\n self.automation.wait.until(\r\n EC.presence_of_element_located((By.CSS_SELECTOR, 'button[name=\"processAddress\"]')))\r\n\r\n self.automation.driver.find_element_by_css_selector('button[name=\"processAddress\"]').click()\r\n\r\n # 4- shipping\r\n logger.info('4- shipping')\r\n self.automation.wait.until(\r\n EC.presence_of_element_located((By.CSS_SELECTOR, '#uniform-cgv span')))\r\n\r\n is_checked = self.automation.driver.find_element_by_css_selector('#uniform-cgv span').get_attribute('class')\r\n if not is_checked: # agree\r\n self.automation.driver.execute_script(\"document.querySelectorAll('#cgv')[0].click()\")\r\n\r\n self.automation.driver.find_element_by_css_selector('button[name=processCarrier]').click()\r\n logger.info('agree and confirmed')\r\n\r\n # pay by bank wire\r\n logger.info('pay by bank wire')\r\n self.automation.wait.until(\r\n EC.presence_of_element_located((By.CSS_SELECTOR, '.payment_module a')))\r\n\r\n self.automation.driver.find_element_by_css_selector('.payment_module a').click()\r\n\r\n # 5- payment and confirm\r\n logger.info('5- payment and confirm')\r\n self.automation.wait.until(\r\n EC.presence_of_element_located((By.CSS_SELECTOR, '#cart_navigation button')))\r\n self.automation.driver.find_element_by_css_selector('#cart_navigation button').click()\r\n\r\n # back to orders\r\n logger.info('back to orders')\r\n self.automation.wait.until(\r\n EC.presence_of_element_located((By.CSS_SELECTOR, 'p.cart_navigation .button-exclusive.btn')))\r\n self.automation.driver.find_element_by_css_selector('p.cart_navigation .button-exclusive.btn').click()\r\n\r\n # how many items do you have\r\n time.sleep(1.5)\r\n self.automation.wait.until(\r\n EC.presence_of_element_located((By.CSS_SELECTOR, '#order-list tbody tr')))\r\n items = self.automation.driver.find_elements_by_css_selector('#order-list tbody tr')\r\n logger.info(f'You have \"{len(items)}\" at your order')", "def choose_new_address(self) -> \"CheckoutPage\":\n self.accordion = BillingDetailsUser(self.driver)\n self.accordion.btn_new_address.click()\n return self", "def open_account():\n print(\"\\n\")\n print(messages.open_account)\n u_id = pyip.inputInt(\"Id: \", greaterThan=0)\n name = pyip.inputCustom(raiseNameError, prompt=\"Name: \")\n address = pyip.inputCustom(raiseAddressError, prompt=\"Address: \")\n email = pyip.inputEmail(\"Email: \")\n balance = pyip.inputInt(\"Balance: \", min=0)\n password = pyip.inputPassword(\"Password: \")\n\n user_data = [u_id, name, address, balance, email, password]\n result = BankOperationsBackend.open_account(user_data)\n\n start_again() if result else BankOperationsUi.open_account()", "def goto_create(self):\n\n self.create.click()", "def cb_about_show(self, button): \n print(\"About Dialog show\") \n self.about_dialog = self.builder.get_object(\"about_dialog\") \n self.about_dialog.show_all()", "def click_add_financial_charges_button(self):\n self.number_of_financial_charges_before_create = self.get_number_of_financial_charges_in_the_grid()\n self.click_element(self.add_financial_charges_button_locator, hover=True)", "def 
clickViewProfile(self):\n self.waitForElement(locator=self._viewProfileBtn, locatorType=\"xpath\")\n element = self.getElementList(locator=self._viewProfileBtn, locatorType=\"xpath\")\n self.elementClick(element=element[0])", "def click_button(self):\n self.widgets.get('button').click()", "def get_form_class(self):\n if self.survey.get_requires_payment():\n return AuthorizenetSurveyPurchaseForm\n return super(AuthorizenetSurveyPurchaseCreate, self).get_form_class()", "def button_fac_ent(self):\n invoice = self._fac_ent()\n\n # imprime factura\n datas = {\n 'ids': invoice.ids,\n 'model': 'account.report_invoice',\n 'form': invoice.read()\n }\n return {\n 'type': 'ir.actions.report.xml',\n 'report_name': 'aeroo_report_ar_einvoice',\n 'datas': datas,\n }", "def on_pushButton_query_account_clicked(self):\n # TODO: not implemented yet\n raise NotImplementedError", "def show(cls, context: DataContext, project: ResearchProject, parent):\n dialog = cls(context, project, parent)\n dialog.exec_()", "def custom_field_popup_action(self):\n if self.popup.get_option():\n custom_options = self.controller.get_minefield_options()[\"custom\"]\n self.controller.set_difficulty(custom_options)\n return Action(\"goto generating view\", [])\n return None", "def submit_and_back(self):\n self.submit(skip_confirm=True)\n self.parent().do_detail()", "def submit_and_back(self):\n self.submit(skip_confirm=True)\n self.parent().do_detail()", "def step1(request):\n\tif request.method == 'POST':\n\t\tform = CustomerBuyForm(request.POST)\n\t\tif form.is_valid():\n\t\t\tcust = form.save(commit=False)\n\t\t\tcust.customer = request.user\n\t\t\tcust.save()\n\t\t\treturn HttpResponseRedirect(\"/comprador/\")\n\telse:\n\t\tform = CustomerBuyForm()\n\ttemplate = 'customerbuy/step1.html'\n\treturn render_to_response(template,context_instance=RequestContext(request,{'form': form} ))", "def click_add_new_record_button(self):\n self.click_element(self.add_new_record_button_locator, True)", "def contact_linkup(self, request, pk):\n obj_api = api()\n title_contact = \"Tu contacto Linkup\"\n token = request.session['token']\n resp = obj_api.get(slug='sellers/' + pk + \"/\", token=token)\n return render(request, 'frontend/actors/client/my_account.html', {'data_user': resp, \n 'title_contact': title_contact})", "def name(self) -> Text:\n\n return \"cc_payment_form\"", "def i_navigate_to_contact_link():\n driver.find_element_by_id(\"contact_link\").click()", "def get(self, request, *args, **kwargs):\n\n # Access will be granted in Complete view if payment_id matches.\n payment_id = self.execute_payment()\n # Check if payment id belongs to a Catalog donation -> product_id is set\n donation = Donation.objects.confirm_by_reference(payment_id)\n\n flow_type = 'one_time'\n url = reverse('become_supporter_complete') + \\\n '?payment_id={}'.format(payment_id)\n if donation.product_id:\n flow_type ='product_support'\n url += '&flow_type={}&product_id={}'.format(flow_type, donation.product_id)\n if donation.sponsored_event_dedication:\n flow_type = 'event_sponsorship'\n url += '&flow_type={}&event_id={}'.format(flow_type, donation.sponsored_event_id)\n\n if flow_type == 'event_sponsorship':\n custom_send_receipt(receipt_type=flow_type,\n amount=donation.amount, user=donation.user,\n dedication=donation.sponsored_event_dedication,\n musician=donation.sponsored_event.leader_string(),\n event_date=donation.sponsored_event.get_date())\n else:\n custom_send_receipt(receipt_type='one_time',\n amount=donation.amount, user=donation.user)\n\n return 
redirect(url)", "def confirm(request, virtualpos_type):\n return djangovirtualpos_views.confirm_payment(request, virtualpos_type, CreditCardReference)", "def custom_actions(self, form_wizard_entry, request=None):", "def _open_form_frame(self):\n logger.debug(\"Waiting for the form frame\")\n WebDriverWait(self.driver, self.timeout).until(\n EC.visibility_of_element_located(self.locators.FORM_FRAME)\n )\n\n logger.debug(\"Open the form frame\")\n url = \"https://www.iowacourts.state.ia.us/ESAWebApp/TrialCourtStateWide\"\n self.driver.get(url)\n\n logger.debug(\"Waiting for the tabs\")\n WebDriverWait(self.driver, self.timeout).until(\n EC.visibility_of_element_located(self.locators.CASE_ID_TAB)\n )", "def click_login_button(self):", "def hide_invoice_order_button(self):\n invoice_order_action = self.env.ref('sale.action_view_sale_advance_payment_inv')\n if invoice_order_action and invoice_order_action.binding_model_id:\n invoice_order_action.binding_model_id = False", "def OnButtonRateHelpButton(self, event):\r\n\t\twebbrowser.open(consts.URL_HELP_RATE)", "def showEditContact(self):", "def activate_account(self):\n self.driver.execute_script(\"window.scrollTo(0, 1000)\")\n self.click_on_element_by_css(tep.ACTIVATION_LINK)\n self.click_on_element_by_css(tep.ACTIVATION_BUTTON)", "def click_buy_and_sell_deal_create_button(self):\n self.click_element(self.save_vendor_profile_locator)", "def submit(request):\n if not request.user.is_authenticated():\n return proceed(request)\n # If dev has already agreed, continue to next step.\n user = UserProfile.objects.get(pk=request.user.id)\n if not user.read_dev_agreement:\n return redirect('submit.app.terms')\n return manifest(request)", "def onclick_open_camera_button(self):\n self.openCam.show()", "def exposition_submit(request, pk):\n success_url = request.GET.get('next', None)\n if 'add-another' in request.POST:\n success_url = reverse('expo_submit')\n context = get_navbar_context()\n context.update(\n get_breadcrumbs(request.path, web_breadcrumb_dict)\n )\n\n form_kwargs = {'user':request.user}\n if pk:\n exposition = get_object_or_404(Exposition, pk=pk)\n form_kwargs.update({'instance':exposition})\n else:\n exposition = None\n \n if request.method == 'POST': # If the form has been submitted...\n form = ExpositionForm(request.POST, request.FILES, **form_kwargs)\n if form.is_valid(): # All validation rules pass\n obj = form.save()\n messages.success(\n request,\n _('The exposition has been submitted correctly.')\n )\n if success_url is not None:\n return HttpResponseRedirect(success_url)\n else:\n return HttpResponseRedirect(obj.get_absolute_url())\n \n else:\n messages.warning(request, _('Error submitting the exposition.'))\n else:\n form = ExpositionForm(**form_kwargs)\n \n context.update({\n 'object':exposition,\n 'form':form,\n 'success_url':success_url\n })\n \n return render(request, 'web/home/expo_submit.html', context)", "def process_show_form(self, request, step, form):\n pass", "def openTB4Settings(self):\n self.TB4_Window = QtWidgets.QDialog()\n self.TB4_ui = Ui_robotFourConfig()\n self.TB4_ui.setupUi(self.TB4_Window)\n self.TB4_Window.show()", "def button_fac_cob_ent(self):\n invoice = self._fac_ent()\n\n # pagar la factura\n # hacer configuracion para modificar esto\n receipt_obj = self.env['account.voucher.receiptbook']\n receipt = receipt_obj.search([('name', 'like', 'Recibos')], limit=1)\n\n journal = self.journal_id\n res = invoice.invoice_pay_customer()\n context = res['context']\n\n account_voucher_obj = 
self.env['account.voucher']\n voucher = account_voucher_obj.create({\n 'partner_id': context['default_partner_id'],\n 'journal_id': journal.id,\n 'account_id': journal.default_debit_account_id.id,\n 'type': context['type'],\n 'amount': context['default_amount'],\n 'net_amount': context['default_amount'],\n 'receiptbook_id': receipt.id,\n 'company_id': self.env.user.company_id.id\n })\n voucher.signal_workflow('proforma_voucher')\n\n account_move_line_obj = self.env['account.move.line']\n\n # obtener un recordser vacio\n lines2rec = account_move_line_obj.browse()\n\n # obtener las lineas a conciliar de facturas\n account_move_line = account_move_line_obj.search(\n [('document_number', '=', invoice.document_number)])\n for re in account_move_line:\n if re.account_id.reconcile:\n lines2rec += re\n\n # obtener las lineas a conciliar de pagos\n account_move_line = account_move_line_obj.search(\n [('document_number', '=', voucher.document_number)])\n for re in account_move_line:\n if re.account_id.reconcile:\n lines2rec += re\n\n period_obj = self.env['account.period']\n period = period_obj.find()\n\n # reconciliar las lineas de factura con pagos\n lines2rec.reconcile('manual',\n journal.default_debit_account_id.id, # writeoff_acc_id\n period.id, # writeoff_period_id,\n journal.id) # writeoff_journal_id)\n\n # imprime factura\n datas = {\n 'ids': invoice.ids,\n 'model': 'account.report_invoice',\n 'form': invoice.read()\n }\n return {\n 'type': 'ir.actions.report.xml',\n 'report_name': 'aeroo_report_ar_einvoice',\n 'datas': datas,\n }", "def funding_view(request, slug, id):\n company =get_object_or_404(Company,slug=slug)\n edit = validate_user_company_access_or_redirect(request,company)\n funding_reference = get_object_or_404(Funding, id=id,company=company)\n\n return render_to_response('funding_form.html', \n {'details': funding_reference,'info':funding_reference},\n context_instance=RequestContext(request))", "def action_goto(self):\n dialog = GoToDialog(self)\n dialog.exec()\n\n # Re-focus the main window\n self.activateWindow()", "def dispatch(self, *args, **kwargs):\r\n return super(PaymentFakeView, self).dispatch(*args, **kwargs)", "def on_ConfirmWalletOP_clicked(self):\n # TODO: not implemented yet\n raise NotImplementedError", "def click_login_button(self):\n submit_button = self.locate_element_by_css_selector(LOGIN_BUTTON_SELECTPR)\n submit_button.click()", "def OnButtonSubmitterPageButton(self, event):\r\n\t\twebbrowser.open(self._configtmp[\"imageurl\"])", "def click_submit_button(self):\n self.click(by_locator=self.__ASK_QUESTION_PAGE_ASK_QUESTION_BUTTON)", "def send_by_email(self):\r\n ir_model_data = self.env['ir.model.data']\r\n try:\r\n template_id = ir_model_data.get_object_reference(\r\n 'ng_church', 'email_template_church_pledge_report')[1]\r\n except ValueError:\r\n template_id = False\r\n try:\r\n compose_form_id = ir_model_data.get_object_reference(\r\n 'mail', 'email_compose_message_wizard_form')[1]\r\n except ValueError:\r\n compose_form_id = False\r\n ctx = dict(self._context)\r\n ctx.update({\r\n 'default_model': 'church.pledge',\r\n 'default_res_id': self._ids[0],\r\n 'default_use_template': bool(template_id),\r\n 'default_template_id': template_id,\r\n 'default_composition_mode': 'comment',\r\n })\r\n return {\r\n 'name': _('Compose Email'),\r\n 'type': 'ir.actions.act_window',\r\n 'view_type': 'form',\r\n 'view_mode': 'form',\r\n 'res_model': 'mail.compose.message',\r\n 'views': [(compose_form_id, 'form')],\r\n 'view_id': compose_form_id,\r\n 'target': 'new',\r\n 
'context': ctx,\r\n }", "def onCheckout(self, controller):\n \n if askokcancel(\"Proceed\", \"Pay the order?\"):\n c = controller.customer\n package = {'customer_id':c.id, 'order_price':c.my_order.GetTotalPrice}\n msg = controller.transmit(package)\n \n if msg['order_received']:\n c.CheckOut(c.my_order.GetTotalPrice)\n c.Clear()\n controller.show_frame(PageThree)", "def input_payment_details(self):\n pass", "def gt_helper_clicked(self):\n if not self.gt_helper_open:\n self.gt_helper_open = True\n self.gt_helper.show()", "def payment_transaction(self, acquirer_id):\n cr, uid, context = request.cr, request.uid, request.context\n payment_obj = request.registry.get('payment.acquirer')\n transaction_obj = request.registry.get('payment.transaction')\n order = request.website.sale_get_order(context=context)\n\n if not order or not order.order_line or acquirer_id is None:\n return request.redirect(\"/shop/checkout\")\n\n assert order.partner_id.id != request.website.partner_id.id\n\n # find an already existing transaction\n tx = request.website.sale_get_transaction()\n if tx:\n tx_id = tx.id\n if tx.sale_order_id.id != order.id or tx.state in ['error', 'cancel'] or tx.acquirer_id.id != acquirer_id:\n tx = False\n tx_id = False\n elif tx.state == 'draft': # button cliked but no more info -> rewrite on tx or create a new one ?\n tx.write(dict(transaction_obj.on_change_partner_id(cr, SUPERUSER_ID, None, order.partner_id.id, context=context).get('values', {}), amount=order.amount_total))\n if not tx:\n tx_id = transaction_obj.create(cr, SUPERUSER_ID, {\n 'acquirer_id': acquirer_id,\n 'type': 'form',\n 'amount': order.amount_total,\n 'currency_id': order.pricelist_id.currency_id.id,\n 'partner_id': order.partner_id.id,\n 'partner_country_id': order.partner_id.country_id.id,\n 'reference': request.env['payment.transaction'].get_next_reference(order.name),\n 'sale_order_id': order.id,\n }, context=context)\n request.session['sale_transaction_id'] = tx_id\n tx = transaction_obj.browse(cr, SUPERUSER_ID, tx_id, context=context)\n\n # update quotation\n request.registry['sale.order'].write(\n cr, SUPERUSER_ID, [order.id], {\n 'payment_acquirer_id': acquirer_id,\n 'payment_tx_id': request.session['sale_transaction_id']\n }, context=context)\n\n return payment_obj.render(\n cr, SUPERUSER_ID, tx.acquirer_id.id,\n tx.reference,\n order.amount_total,\n order.pricelist_id.currency_id.id,\n partner_id=order.partner_shipping_id.id or order.partner_invoice_id.id,\n tx_values={\n 'return_url': '/shop/payment/validate',\n },\n context=dict(context, submit_class='btn btn-primary', submit_txt=_('Оформить')))", "def submit(self):\n self.driver.find_element(*BaseLocators.SUBMIT_BUTTON).click()", "def web_payment(request):\n context = {\"token\": request.user.auth_token.key}\n return render(request, \"payment/payment.html\", context=context)", "def action_next(self, cr, uid, ids, context=None):\n survey_obj = self.pool.get('survey')\n search_obj = self.pool.get('ir.ui.view')\n if context is None: context = {}\n\n this = self.browse(cr, uid, ids, context=context)[0]\n survey_id = this.survey_id.id\n context.update({'survey_id': survey_id, 'sur_name_id': this.id})\n cr.execute('select count(id) from survey_history where user_id=%s\\\n and survey_id=%s' % (uid,survey_id))\n\n res = cr.fetchone()[0]\n sur_rec = survey_obj.browse(cr,uid,survey_id,context=context)\n if sur_rec.response_user and res >= sur_rec.response_user:\n raise osv.except_osv(_('Warning!'),_(\"You cannot give response for this survey more than %s times.\") % 
(sur_rec.response_user))\n\n if sur_rec.max_response_limit and sur_rec.max_response_limit <= sur_rec.tot_start_survey:\n raise osv.except_osv(_('Warning!'),_(\"You cannot give more responses. Please contact the author of this survey for further assistance.\"))\n\n search_id = search_obj.search(cr,uid,[('model','=','survey.question.wiz'),('name','=','Survey Search')])\n return {\n 'view_type': 'form',\n \"view_mode\": 'form',\n 'res_model': 'survey.question.wiz',\n 'type': 'ir.actions.act_window',\n 'target': 'new',\n 'search_view_id': search_id[0],\n 'context': context\n }", "def adv_new_window(self):\n adv=workflow.advancedoptions_w.ADialog()\n adv.exec_()", "def open_case_number_search_tab(self):\n # Open the frame where the form is hiding\n self._open_form_frame()\n\n # Click into the trial court search\n logger.debug(\"Clicking the case number search tab\")\n self.click(self.locators.CASE_ID_TAB)", "def make_form(self):", "def open_invoices(self):\n return {\n 'domain': \"[('id', 'in', \" + str(self.invoice_ids.ids) + \" )]\",\n 'name': 'Invoices',\n 'view_mode': 'tree,form',\n 'res_model': 'account.move',\n 'type': 'ir.actions.act_window',\n }", "def CreateAccount():\n login_frame.forget()\n self.LoadCreateAccountWindow()", "def __fill_applicant_form(self, profile):\r\n\r\n actions = ActionChains(self.driver)\r\n actions.send_keys(profile['name'] + Keys.TAB + \\\r\n profile['email'] + Keys.TAB + \\\r\n profile['phone_number'] + Keys.TAB)\r\n actions.perform()", "def checkout(self): \n mtool = getToolByName(self.context, \"portal_membership\")\n ICheckoutManagement(self.context).redirectToNextURL(\"AFTER_START\")", "def presssubmitdesign(self):\n self.mode.submitDesign(self.myDesign)", "def initiate_payment(self, order: Order) -> str:\n raise NotImplementedError", "def show_user_detail_form():\n\n return render_template(\"add-user-details.html\")", "def click_button(self):\n self.q(css='div#fixture button').first.click()", "def open(self, acct, password, blockchain, signals):\n self.setAccountHandlers(blockchain, signals)\n self.selectedAccount = self.openAccount = self.acctManager.openAccount(acct, password)\n return self", "def open_ride_edit_form(self) -> object:\n self.container.find_element(*self._ride_edit).click()\n\n return EditForm(self).wait_for_component_to_be_present()", "def click_related_list_button(self, heading, button_title):\n self.load_related_list(heading)\n locator = lex_locators[\"record\"][\"related\"][\"button\"].format(\n heading, button_title\n )\n self._jsclick(locator)\n self.wait_until_modal_is_open()", "def show_form():\n\n return render_template(\"form.html\")", "def submit_order(request, orderid):\n if request.user.is_staff:\n order = WorkingOrder.objects.get(pk=orderid)\n else:\n order = request.user.workingorder_set.get(id=orderid) \n\n if order.status != BaseOrder.Const.DEALER_EDIT:\n return HttpResponseServerError()\n \n # always submit orders in the context of proper account\n account = order.owner.get_profile().account\n \n if request.method == 'GET': \n form = SubmitForm(instance=order)\n else:\n form = SubmitForm(request.POST, instance=order)\n if form.is_valid():\n order = form.save(commit=False)\n cost = order.cost or decimal.Decimal() \n if cost > account.credit_balance:\n ## users account doesn't have enough juice.. 
send then to the ecom engine \n ## to pay, then get them back here ...\n order = form.save()\n products = [form.cleaned_data['design_product']]\n option = form.cleaned_data.get('processing_option', None)\n if option:\n products.append(option) \n new_cart(request, products)\n request.method = 'GET' \n return paypal_checkout(request, success_url=reverse('submit-order', args=[orderid]))\n else: \n register_design_order(order.owner, order.owner.get_profile().account, order, cost)\n order = form.save(commit=False)\n order.status = BaseOrder.Const.SUBMITTED\n order.submitted = datetime.now()\n order.save()\n # return HttpResponseRedirect('completed_order_summary', args=[orderid]) # TODO\n return HttpResponseRedirect(reverse('submit-order-completed', args=[order.id])) \n return dict(order=order, form=form)", "def get_absolute_url(self):\n return reverse('payment-detail', args=[str(self.id)])", "def __add_credit_menu(self):\n log.debug(\"Displaying __add_credit_menu\")\n # Create a payment methods keyboard\n keyboard = list()\n # Add the supported payment methods to the keyboard\n # Cash\n keyboard.append([telegram.KeyboardButton(self.loc.get(\"menu_cash\"))])\n # Telegram Payments\n if self.cfg.ccard[\"credit_card_token\"] != \"\":\n keyboard.append([telegram.KeyboardButton(self.loc.get(\"menu_credit_card\"))])\n # Keyboard: go back to the previous menu\n keyboard.append([telegram.KeyboardButton(self.loc.get(\"menu_all_cancel\"))])\n # Send the keyboard to the user\n self.bot.send_message(self.chat.id, self.loc.get(\"conversation_payment_method\"),\n reply_markup=telegram.ReplyKeyboardMarkup(keyboard, one_time_keyboard=True))\n # Wait for a reply from the user\n selection = self.__wait_for_specific_message(\n [self.loc.get(\"menu_cash\"), self.loc.get(\"menu_credit_card\"), self.loc.get(\"menu_all_cancel\")],\n cancellable=True)\n # If the user has selected the Cash option...\n if selection == self.loc.get(\"menu_cash\"):\n # Go to the pay with cash function\n self.bot.send_message(self.chat.id,\n self.loc.get(\"payment_cash\", user_cash_id=self.user.identifiable_str()))\n # If the user has selected the Credit Card option...\n elif selection == self.loc.get(\"menu_credit_card\"):\n # Go to the pay with credit card function\n self.__add_credit_cc()\n # If the user has selected the Cancel option...\n elif isinstance(selection, CancelSignal):\n # Send him back to the previous menu\n return", "def btn_create_order_con(self):\n\t\tprint()\n\t\tprint('btn_create_order_con')\n\n\t\t# Init\n\t\t\n\t\t# Search Partner\n\t\tpartner = tre_funcs.get_partner(self, self.patient.name)\n\n\t\t# Search pricelist\n\t\tpricelist = tre_funcs.get_pricelist(self)\n\n\t\t# Search product\n\t\tname = 'CONSULTA MEDICA'\n\t\tprice_list = '2019'\n\t\tproduct = tre_funcs.get_product_product(self, name, price_list)\n\n\t\t# Check \n\t\tproduct_template = tre_funcs.get_product_template(self, name, price_list)\n\t\ttre_funcs.check_product(self, '2019', product, product_template)\n\n\t\t# Create order \n\t\torder = pl_creates.create_order_con(self, partner.id, pricelist.id, product)\n\t\t\n\t\t# Open Order\n\t\treturn action_funcs.open_order(order)", "def show(self):\n self.Show()", "def on_preferencesButton_clicked(self):\n e5App().getObject(\"UserInterface\").showPreferences(\"translatorPage\")" ]
[ "0.61552536", "0.60529786", "0.59445393", "0.5872724", "0.5840807", "0.57562137", "0.5702436", "0.56132597", "0.5595778", "0.5586985", "0.55588377", "0.55404526", "0.548458", "0.54655564", "0.5451618", "0.5413497", "0.5408232", "0.5404972", "0.5403777", "0.5348176", "0.5343124", "0.5334868", "0.53246135", "0.5319278", "0.5282372", "0.52773255", "0.5270212", "0.5265884", "0.52438617", "0.5213501", "0.5211571", "0.5206802", "0.52053267", "0.5195327", "0.51938885", "0.5192371", "0.51890266", "0.51466286", "0.5143261", "0.5134438", "0.5129826", "0.51179165", "0.51179165", "0.5111322", "0.51107156", "0.5098083", "0.5089617", "0.5089369", "0.50869375", "0.50855136", "0.50770783", "0.50734437", "0.50688136", "0.5066496", "0.50652814", "0.5060815", "0.50394636", "0.50363594", "0.49909008", "0.4990259", "0.49588668", "0.49548018", "0.49504793", "0.49465302", "0.49460372", "0.49360344", "0.4931126", "0.49199182", "0.49121496", "0.49085608", "0.489614", "0.48953384", "0.48870167", "0.4882536", "0.48795655", "0.48772502", "0.4877224", "0.48765168", "0.48695296", "0.4869299", "0.48650718", "0.48596516", "0.48574063", "0.48431078", "0.48337588", "0.4833162", "0.48297733", "0.48264906", "0.48256144", "0.4824949", "0.4821755", "0.48198935", "0.48193574", "0.4819193", "0.48124823", "0.48094678", "0.48061353", "0.48059586", "0.48024207", "0.48009703" ]
0.52446413
28
This button method is used to open the related account payment form view.
def landlord_button_deposite_received(self): payment_id = False acc_pay_form = self.env.ref( 'account.view_account_payment_form') account_jrnl_obj = self.env['account.journal'].search( [('type', '=', 'sale')], limit=1) payment_obj = self.env['account.payment'] payment_method_id = self.env.ref( 'account.account_payment_method_manual_in') for tenancy_rec in self: if tenancy_rec.acc_pay_dep_rec_id and \ tenancy_rec.acc_pay_dep_rec_id.id: return { 'view_type': 'form', 'view_id': acc_pay_form.id, 'view_mode': 'form', 'res_model': 'account.payment', 'res_id': tenancy_rec.acc_pay_dep_rec_id.id, 'type': 'ir.actions.act_window', 'target': 'current', 'context': self._context, } if tenancy_rec.deposit == 0.00: raise Warning(_('Please Enter Deposit amount.')) if tenancy_rec.deposit < 0.00: raise Warning( _('The deposit amount must be strictly positive.')) vals = { 'partner_id': tenancy_rec.property_owner_id.parent_id.id, 'partner_type': 'customer', 'journal_id': account_jrnl_obj.id, 'payment_type': 'inbound', 'communication': 'Deposit Received', 'tenancy_id': tenancy_rec.id, 'amount': tenancy_rec.deposit, 'property_id': tenancy_rec.property_id.id, 'payment_method_id': payment_method_id.id } payment_id = payment_obj.create(vals) return { 'view_mode': 'form', 'view_id': acc_pay_form.id, 'view_type': 'form', 'res_id': payment_id and payment_id.id, 'res_model': 'account.payment', 'type': 'ir.actions.act_window', 'nodestroy': True, 'target': 'current', 'domain': '[]', 'context': { 'close_after_process': True, } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def open_invoice(self, cr, uid, ids, context=None):\n if context is None:\n context = {}\n mod_obj = self.pool.get('ir.model.data')\n for advance_pay in self.browse(cr, uid, ids, context=context):\n form_res = mod_obj.get_object_reference(cr, uid, 'account', 'invoice_supplier_form')\n form_id = form_res and form_res[1] or False\n tree_res = mod_obj.get_object_reference(cr, uid, 'account', 'invoice_tree')\n tree_id = tree_res and tree_res[1] or False\n\n return {\n 'name': _('Advance Invoice'),\n 'view_type': 'form',\n 'view_mode': 'form,tree',\n 'res_model': 'account.invoice',\n 'res_id': int(context['invoice_id'][0]),\n 'view_id': False,\n 'views': [(form_id, 'form'), (tree_id, 'tree')],\n 'context': context,\n 'type': 'ir.actions.act_window',\n }", "def checkout_btn(self):\n self._checkout_btn.click()", "def click_submit_payment_button(self):\n self.click(self.submit_payment_locator)\n time.sleep(2)", "def on_OpenExplorerAccount_clicked(self):\n # TODO: not implemented yet\n #raise NotImplementedError\n url = f\"http://kfc.matrix.io/{self.a0_Address}\"\n\n self.browser.openurl(url)\n self.OnlyDisplay(f\"start {url}\")", "def open_accounts_page(self):\n log.info(\"In landing page: click bill view button\")\n bills_page_for_meters_link = self.driver.find_element(\n *self.link_to_accs_locator\n )\n bills_page_for_meters_link.click()\n self.driver.sleep(5)\n self.driver.switch_to.window(self.driver.window_handles[-1])", "def on_show_opc_relation(self):\n from OPCRelationDialog import QOPCRelationDialog\n\n dlg = QOPCRelationDialog(self)\n dlg.exec_()", "def proceed(request):\n if request.user.is_authenticated():\n return submit(request)\n agreement_form = forms.DevAgreementForm({'read_dev_agreement': True},\n instance=None, request=request)\n return render(request, 'submit/terms.html',\n {'step': 'terms', 'agreement_form': agreement_form,\n 'proceed': True})", "def on_OpenExplorerWallet_clicked(self):\n # TODO: not implemented yet\n #raise NotImplementedError\n url = \"http://wallet.matrix.io\"\n\n self.browser.openurl(url)\n self.OnlyDisplay(f\"start {url}\")", "def open_create_partner(self, cr, uid, ids, context=None):\n view_obj = self.pool.get('ir.ui.view')\n view_id = view_obj.search(cr, uid, [('model', '=', self._name), \\\n ('name', '=', self._name+'.view')])\n return {\n 'view_mode': 'form',\n 'view_type': 'form',\n 'view_id': view_id or False,\n 'res_model': self._name,\n 'context': context,\n 'type': 'ir.actions.act_window',\n 'target': 'new',\n }", "def on_OpenWallet_clicked(self):\n # TODO: not implemented yet\n raise NotImplementedError", "def visionActivada(\n self,\n ):\n Ventana_Formulario(self.root_controler)", "def click_request_new_deal_button(self):\n self.click_element(self.request_new_deal_button_locator)", "def payment(self, **post):\n cr, uid, context = request.cr, request.uid, request.context\n payment_obj = request.registry.get('payment.acquirer')\n sale_order_obj = request.registry.get('sale.order')\n\n order = request.website.sale_get_order(context=context)\n order.write({'usersess': request.session['webcalc_session_id']})\n #order.env.cr.commit()\n redirection = self.checkout_redirection(order)\n if redirection:\n return redirection\n\n shipping_partner_id = False\n if order:\n if order.partner_shipping_id.id:\n shipping_partner_id = order.partner_shipping_id.id\n else:\n shipping_partner_id = order.partner_invoice_id.id\n\n values = {\n 'order': request.registry['sale.order'].browse(cr, SUPERUSER_ID, order.id, context=context),\n 'usersess': 
request.session['webcalc_session_id']\n }\n values['errors'] = sale_order_obj._get_errors(cr, uid, order, context=context)\n values.update(sale_order_obj._get_website_data(cr, uid, order, context))\n\n if not values['errors']:\n acquirer_ids = payment_obj.search(cr, SUPERUSER_ID, [('website_published', '=', True), ('company_id', '=', order.company_id.id)], context=context)\n values['acquirers'] = list(payment_obj.browse(cr, uid, acquirer_ids, context=context))\n render_ctx = dict(context, submit_class='btn btn-primary', submit_txt=_('Завершить оформление'))\n for acquirer in values['acquirers']:\n acquirer.button = payment_obj.render(\n cr, SUPERUSER_ID, acquirer.id,\n '/',\n order.amount_total,\n order.pricelist_id.currency_id.id,\n partner_id=shipping_partner_id,\n tx_values={\n 'return_url': '/shop/payment/validate',\n },\n context=render_ctx)\n #vips_shop\n return request.website.render(\"vips_shop.payment\", values)", "def get_view():\n\n gateway = DS_CONFIG[\"gateway_account_id\"]\n gateway_ok = gateway and len(gateway) > 25\n\n return render_template(\n \"eg014_collect_payment.html\",\n title=\"Order form with payment\",\n source_file=path.basename(path.dirname(__file__)) + \"/controller.py\",\n source_url=DS_CONFIG[\"github_example_url\"] + path.basename(path.dirname(__file__)) + \"/controller.py\",\n documentation=DS_CONFIG[\"documentation\"] + eg,\n show_doc=DS_CONFIG[\"documentation\"],\n signer_name=DS_CONFIG[\"signer_name\"],\n signer_email=DS_CONFIG[\"signer_email\"],\n gateway_ok=gateway_ok\n )", "def btn_create_reco(self):\n\t\tprint()\n\t\tprint('OH - btn_create_reco')\n\n\t\t# Init\n\t\tres_id = self.id\n\t\tres_model = _model_treatment\n\t\tview_id = self.env.ref('openhealth.treatment_2_form_view').id\n\n\t\t# Open\n\t\treturn {\n\t\t\t# Mandatory\n\t\t\t'type': _model_action,\n\t\t\t'name': 'Open Treatment Current',\n\t\t\t# Window action\n\t\t\t'priority': 1,\n\t\t\t'res_id': res_id,\n\t\t\t'res_model': res_model,\n\t\t\t#'view_id': view_id,\n\t\t\t# Views\n\t\t\t#\"views\": [[False, \"form\"]],\n\t\t\t\"views\": [[view_id, \"form\"]],\n\t\t\t'view_mode': 'form',\n\t\t\t'target': 'current',\n\t\t\t#\"domain\": [[\"patient\", \"=\", self.patient.name]],\n\t\t\t#'auto_search': False,\n\t\t\t'flags': {\n\t\t\t\t\t\t#'form': {'action_buttons': True, 'options': {'mode': 'edit'}}\n\t\t\t\t\t\t'form': {'action_buttons': False, }\n\t\t\t\t\t},\n\t\t\t'context': {\n\t\t\t\t\t\t#'default_treatment': treatment_id,\n\t\t\t\t\t}\n\t\t}", "def click_edit_target_buy_policy_button(self):\n self.click_element(self.edit_target_buy_policy_button_locator)", "def view_account(request, recurring_payment_id, guid=None,\n template_name=\"recurring_payments/index.html\"):\n rp = get_object_or_404(RecurringPayment, pk=recurring_payment_id)\n\n # only admin or user self can access this page\n if not (request.user.is_authenticated() and\n (request.user.profile.is_superuser\n or request.user.id == rp.user.id) or rp.guid == guid):\n raise Http403\n\n paid_payment_transactions = PaymentTransaction.objects.filter(\n recurring_payment=rp,\n status=True\n )\n if paid_payment_transactions:\n last_paid_payment_transaction = paid_payment_transactions[0]\n else:\n last_paid_payment_transaction = None\n\n failed_payment_transactions = PaymentTransaction.objects.filter(\n recurring_payment=rp,\n status=False\n )\n if failed_payment_transactions:\n last_failed_payment_transaction = failed_payment_transactions[0]\n else:\n last_failed_payment_transaction = None\n\n display_failed_transaction = False\n if 
last_failed_payment_transaction:\n if not last_paid_payment_transaction or \\\n last_failed_payment_transaction.create_dt \\\n > last_paid_payment_transaction.create_dt:\n display_failed_transaction = True\n\n if not rp.trial_amount:\n rp.trial_amount = 0\n\n # rp_invoices\n rp_invoices = RecurringPaymentInvoice.objects.filter(\n recurring_payment=rp\n ).order_by('-billing_cycle_start_dt')\n\n # payment transactions\n payment_transactions = PaymentTransaction.objects.filter(\n recurring_payment=rp\n ).order_by('-create_dt')\n\n # get ready for the add/update payment method button\n test_mode = get_test_mode()\n is_active = (rp.status_detail == 'active')\n if is_active:\n #rp.populate_payment_profile()\n payment_profiles = PaymentProfile.objects.filter(\n customer_profile_id=rp.customer_profile_id,\n status=True, status_detail='active')\n if payment_profiles:\n payment_profile = payment_profiles[0]\n else:\n payment_profile = None\n\n else:\n payment_profile = None\n\n is_owner = request.user.id == rp.user.id\n\n num_accounts = RecurringPayment.objects.filter(user=rp.user).count()\n\n return render_to_response(template_name, {\n 'rp': rp,\n 'display_failed_transaction': display_failed_transaction,\n 'last_paid_payment_transaction': last_paid_payment_transaction,\n 'last_failed_payment_transaction': last_failed_payment_transaction,\n 'rp_invoices': rp_invoices,\n 'payment_transactions': payment_transactions,\n 'payment_profile': payment_profile,\n 'test_mode': test_mode,\n 'is_active': is_active,\n 'is_owner': is_owner,\n 'num_accounts': num_accounts,\n 'memberships': rp.memberships,\n 'STRIPE_PUBLISHABLE_KEY': getattr(settings, 'STRIPE_PUBLISHABLE_KEY', '')\n },\n context_instance=RequestContext(request))", "def click_reference_rates_show_search_form_link(self):\n self.click_element(self.reference_rates_show_search_form_link_locator)", "def select_account(self, account_id: str):\n account_number_box = self.driver.find_or_raise(self.AccountNumberBoxSelector)\n account_number_box.send_keys(account_id)\n account_number_box.send_keys(Keys.ENTER)\n self.driver.sleep(2)\n self.driver.find_or_raise('//button[.=\"View\"]', xpath=True).click()\n self.driver.sleep(2)", "def form_valid(self, form):\n auth_login(self.request, form.get_user())\n if self.request.session.get('payment'):\n Payment.objects.filter(id=self.request.session['payment']).update(\n user_id=self.request.user.revolvuserprofile, entrant_id=self.request.user.revolvuserprofile)\n payment = Payment.objects.get(id=self.request.session['payment'])\n Tip.objects.filter(id=payment.tip_id).update(user_id=self.request.user.revolvuserprofile)\n Project.objects.get(id=payment.project_id).donors.add(self.request.user.revolvuserprofile)\n AnonymousUserDonation.objects.filter(payment_id=self.request.session['payment']).delete()\n del self.request.session['payment']\n\n # messages.success(self.request, 'Logged in as ' + self.request.POST.get('username'))\n # return redirect(reverse('project:view', kwargs={'title':title})+'?amount='+amount+'&tip='+tip)\n messages.success(self.request, 'Logged in as ' + self.request.POST.get('username'))\n return redirect(self.next_url)", "def on_CurrentradioButton_clicked(self):\n # TODO: not implemented yet\n # raise NotImplementedError\n print(\"Select current cash deposit. 
If you select it, cash will be released after 7 days\")\n self.select_actor = \"Month0\"", "def hit_send_payment_button(self):\n\t\telement = Element(driver=self.driver,\n\t\t explicit_wait_time=self.explicit_wait_time,\n\t\t locator=BillPayPageLocator.SEND_PAYMENT_BUTTON)\n\t\telement.click_on()\n\t\treturn None", "def contribute_by_payment_mode(request, campaign_id, payment_mode, template='campaign/campaign_contribution_form_%s.html'):\r\n campaign = get_object_or_404(Campaign.objects.active(), pk=campaign_id)\r\n payment_option = campaign.artist.get_merchant_account(payment_mode)\r\n if not payment_option:\r\n raise Http404\r\n template = template % payment_option.payment_mode\r\n ContribForm = getattr(forms, '%sContributionForm' % payment_option.payment_mode.title())\r\n ctx = {'campaign':campaign, 'c':campaign, 'payment_option':payment_option}\r\n proceed_to_pay = False\r\n if campaign.is_free:\r\n return HttpResponseRedirect(reverse('contribute_to_campaign', kwargs={'campaign_id':campaign_id}))\r\n try:\r\n if request.POST:\r\n form = ContribForm(campaign=campaign, user_profile=request.user.get_profile(), data=request.POST)\r\n if form.is_valid():\r\n pending_contrib = form.save(commit=True)\r\n _log.info('Pending %s contribution recorded: %s', payment_option.payment_mode_name, pending_contrib)\r\n proceed_to_pay = True\r\n str_list = [str(k) for k in (campaign.pk, int(time()), pending_contrib.pk, request.user.pk)]\r\n pending_contrib.invoice_num = ''.join(str_list)\r\n ctx['contrib'] = pending_contrib\r\n else:\r\n form = ContribForm(campaign=campaign, user_profile=request.user.get_profile())\r\n ctx.update({'form':form, 'proceed_to_pay':proceed_to_pay})\r\n except CampaignError, e:\r\n request.user.message_set.create(message=e.message)\r\n return HttpResponseRedirect(reverse('view_campaign', kwargs={'campaign_id':campaign.pk}))\r\n return render_view(request, template, ctx)", "def action_view_invoice_salon(self):\n return {\n 'name': 'Invoices',\n 'domain': [('invoice_origin', '=', self.name)],\n 'res_model': 'account.move',\n 'view_id': False,\n 'view_mode': 'tree,form',\n 'type': 'ir.actions.act_window',\n }", "def payment_method_user(self) -> \"CheckoutPage\":\n self.accordion = PaymentMethodUser(self.driver)\n self.accordion.btn_agree.click()\n self.accordion.btn_continue.click()\n return self", "def anon_contribute_by_payment_mode(request, campaign_id, payment_mode, template='campaign/campaign_contribution_form_%s_anon.html'):\r\n campaign = get_object_or_404(Campaign.objects.active(), pk=campaign_id)\r\n payment_option = campaign.artist.get_merchant_account(payment_mode)\r\n if not payment_option:\r\n raise Http404\r\n template = template % payment_option.payment_mode\r\n ctx = {'campaign':campaign, 'c':campaign, 'payment_option':payment_option, 'is_anon':True, 'proceed_to_pay':True}\r\n if campaign.is_free:\r\n return HttpResponseRedirect(reverse('contribute_to_campaign', kwargs={'campaign_id':campaign_id}))\r\n return render_view(request, template, ctx)", "def on12Lead(self, event): # wxGlade: DAQPanel.<event_handler>\n CreateDialog2 = Lead12Dialog2(self,self)\n CreateDialog2.ShowModal()", "def _openButton(self):\n #get the specified file\n selected_file = self.view.list.getSelected()\n\n if selected_file:\n self.model.open(selected_file)\n return\n\n #prompt if they really want to open maya\n dialogs = Dialogs(self.view)\n\n msg = 'No file selected!'\n msg += '\\n\\nAre you sure you want to open maya without a file?'\n dialogs.confirmPrompt(msg)\n\n self.model.open()", 
"def proceed_to_checkout_and_payment(self):\r\n # 1- summary\r\n logger.info('starting wizard with summary')\r\n self.automation.wait.until(\r\n EC.presence_of_element_located((By.CSS_SELECTOR, '.cart_navigation a.standard-checkout')))\r\n self.automation.driver.execute_script(\"document.querySelectorAll('.cart_navigation a.standard-checkout')[0]\"\r\n \".click()\")\r\n\r\n # 2-sign in & 3-address\r\n logger.info('2-sign in & 3-address')\r\n self.automation.wait.until(\r\n EC.presence_of_element_located((By.CSS_SELECTOR, 'button[name=\"processAddress\"]')))\r\n\r\n self.automation.driver.find_element_by_css_selector('button[name=\"processAddress\"]').click()\r\n\r\n # 4- shipping\r\n logger.info('4- shipping')\r\n self.automation.wait.until(\r\n EC.presence_of_element_located((By.CSS_SELECTOR, '#uniform-cgv span')))\r\n\r\n is_checked = self.automation.driver.find_element_by_css_selector('#uniform-cgv span').get_attribute('class')\r\n if not is_checked: # agree\r\n self.automation.driver.execute_script(\"document.querySelectorAll('#cgv')[0].click()\")\r\n\r\n self.automation.driver.find_element_by_css_selector('button[name=processCarrier]').click()\r\n logger.info('agree and confirmed')\r\n\r\n # pay by bank wire\r\n logger.info('pay by bank wire')\r\n self.automation.wait.until(\r\n EC.presence_of_element_located((By.CSS_SELECTOR, '.payment_module a')))\r\n\r\n self.automation.driver.find_element_by_css_selector('.payment_module a').click()\r\n\r\n # 5- payment and confirm\r\n logger.info('5- payment and confirm')\r\n self.automation.wait.until(\r\n EC.presence_of_element_located((By.CSS_SELECTOR, '#cart_navigation button')))\r\n self.automation.driver.find_element_by_css_selector('#cart_navigation button').click()\r\n\r\n # back to orders\r\n logger.info('back to orders')\r\n self.automation.wait.until(\r\n EC.presence_of_element_located((By.CSS_SELECTOR, 'p.cart_navigation .button-exclusive.btn')))\r\n self.automation.driver.find_element_by_css_selector('p.cart_navigation .button-exclusive.btn').click()\r\n\r\n # how many items do you have\r\n time.sleep(1.5)\r\n self.automation.wait.until(\r\n EC.presence_of_element_located((By.CSS_SELECTOR, '#order-list tbody tr')))\r\n items = self.automation.driver.find_elements_by_css_selector('#order-list tbody tr')\r\n logger.info(f'You have \"{len(items)}\" at your order')", "def landlord_button_deposite_pay(self):\n payment_id = False\n acc_pay_form = self.env.ref(\n 'account.view_account_payment_form')\n account_jrnl_obj = self.env['account.journal'].search(\n [('type', '=', 'purchase')], limit=1)\n payment_obj = self.env['account.payment']\n payment_method_id = self.env.ref(\n 'account.account_payment_method_manual_in')\n for tenancy_rec in self:\n if tenancy_rec.acc_pay_dep_rec_id and \\\n tenancy_rec.acc_pay_dep_rec_id.id:\n return {\n 'view_type': 'form',\n 'view_id': acc_pay_form.id,\n 'view_mode': 'form',\n 'res_model': 'account.payment',\n 'res_id': tenancy_rec.acc_pay_dep_rec_id.id,\n 'type': 'ir.actions.act_window',\n 'target': 'current',\n 'context': self._context,\n }\n if tenancy_rec.deposit == 0.00:\n raise Warning(_('Please Enter Deposit amount.'))\n if tenancy_rec.deposit < 0.00:\n raise Warning(\n _('The deposit amount must be strictly positive.'))\n vals = {\n 'partner_id': tenancy_rec.property_owner_id.parent_id.id,\n 'partner_type': 'supplier',\n 'journal_id': account_jrnl_obj.id,\n 'payment_type': 'outbound',\n 'communication': 'Deposit Received',\n 'tenancy_id': tenancy_rec.id,\n 'amount': tenancy_rec.deposit,\n 
'property_id': tenancy_rec.property_id.id,\n 'payment_method_id': payment_method_id.id\n }\n payment_id = payment_obj.create(vals)\n return {\n 'view_mode': 'form',\n 'view_id': acc_pay_form.id,\n 'view_type': 'form',\n 'res_id': payment_id and payment_id.id,\n 'res_model': 'account.payment',\n 'type': 'ir.actions.act_window',\n 'nodestroy': True,\n 'target': 'current',\n 'domain': '[]',\n 'context': {\n 'close_after_process': True,\n }\n }", "def choose_new_address(self) -> \"CheckoutPage\":\n self.accordion = BillingDetailsUser(self.driver)\n self.accordion.btn_new_address.click()\n return self", "def open_account():\n print(\"\\n\")\n print(messages.open_account)\n u_id = pyip.inputInt(\"Id: \", greaterThan=0)\n name = pyip.inputCustom(raiseNameError, prompt=\"Name: \")\n address = pyip.inputCustom(raiseAddressError, prompt=\"Address: \")\n email = pyip.inputEmail(\"Email: \")\n balance = pyip.inputInt(\"Balance: \", min=0)\n password = pyip.inputPassword(\"Password: \")\n\n user_data = [u_id, name, address, balance, email, password]\n result = BankOperationsBackend.open_account(user_data)\n\n start_again() if result else BankOperationsUi.open_account()", "def goto_create(self):\n\n self.create.click()", "def cb_about_show(self, button): \n print(\"About Dialog show\") \n self.about_dialog = self.builder.get_object(\"about_dialog\") \n self.about_dialog.show_all()", "def clickViewProfile(self):\n self.waitForElement(locator=self._viewProfileBtn, locatorType=\"xpath\")\n element = self.getElementList(locator=self._viewProfileBtn, locatorType=\"xpath\")\n self.elementClick(element=element[0])", "def click_add_financial_charges_button(self):\n self.number_of_financial_charges_before_create = self.get_number_of_financial_charges_in_the_grid()\n self.click_element(self.add_financial_charges_button_locator, hover=True)", "def click_button(self):\n self.widgets.get('button').click()", "def get_form_class(self):\n if self.survey.get_requires_payment():\n return AuthorizenetSurveyPurchaseForm\n return super(AuthorizenetSurveyPurchaseCreate, self).get_form_class()", "def button_fac_ent(self):\n invoice = self._fac_ent()\n\n # imprime factura\n datas = {\n 'ids': invoice.ids,\n 'model': 'account.report_invoice',\n 'form': invoice.read()\n }\n return {\n 'type': 'ir.actions.report.xml',\n 'report_name': 'aeroo_report_ar_einvoice',\n 'datas': datas,\n }", "def on_pushButton_query_account_clicked(self):\n # TODO: not implemented yet\n raise NotImplementedError", "def show(cls, context: DataContext, project: ResearchProject, parent):\n dialog = cls(context, project, parent)\n dialog.exec_()", "def custom_field_popup_action(self):\n if self.popup.get_option():\n custom_options = self.controller.get_minefield_options()[\"custom\"]\n self.controller.set_difficulty(custom_options)\n return Action(\"goto generating view\", [])\n return None", "def submit_and_back(self):\n self.submit(skip_confirm=True)\n self.parent().do_detail()", "def submit_and_back(self):\n self.submit(skip_confirm=True)\n self.parent().do_detail()", "def step1(request):\n\tif request.method == 'POST':\n\t\tform = CustomerBuyForm(request.POST)\n\t\tif form.is_valid():\n\t\t\tcust = form.save(commit=False)\n\t\t\tcust.customer = request.user\n\t\t\tcust.save()\n\t\t\treturn HttpResponseRedirect(\"/comprador/\")\n\telse:\n\t\tform = CustomerBuyForm()\n\ttemplate = 'customerbuy/step1.html'\n\treturn render_to_response(template,context_instance=RequestContext(request,{'form': form} ))", "def click_add_new_record_button(self):\n 
self.click_element(self.add_new_record_button_locator, True)", "def contact_linkup(self, request, pk):\n obj_api = api()\n title_contact = \"Tu contacto Linkup\"\n token = request.session['token']\n resp = obj_api.get(slug='sellers/' + pk + \"/\", token=token)\n return render(request, 'frontend/actors/client/my_account.html', {'data_user': resp, \n 'title_contact': title_contact})", "def name(self) -> Text:\n\n return \"cc_payment_form\"", "def i_navigate_to_contact_link():\n driver.find_element_by_id(\"contact_link\").click()", "def get(self, request, *args, **kwargs):\n\n # Access will be granted in Complete view if payment_id matches.\n payment_id = self.execute_payment()\n # Check if payment id belongs to a Catalog donation -> product_id is set\n donation = Donation.objects.confirm_by_reference(payment_id)\n\n flow_type = 'one_time'\n url = reverse('become_supporter_complete') + \\\n '?payment_id={}'.format(payment_id)\n if donation.product_id:\n flow_type ='product_support'\n url += '&flow_type={}&product_id={}'.format(flow_type, donation.product_id)\n if donation.sponsored_event_dedication:\n flow_type = 'event_sponsorship'\n url += '&flow_type={}&event_id={}'.format(flow_type, donation.sponsored_event_id)\n\n if flow_type == 'event_sponsorship':\n custom_send_receipt(receipt_type=flow_type,\n amount=donation.amount, user=donation.user,\n dedication=donation.sponsored_event_dedication,\n musician=donation.sponsored_event.leader_string(),\n event_date=donation.sponsored_event.get_date())\n else:\n custom_send_receipt(receipt_type='one_time',\n amount=donation.amount, user=donation.user)\n\n return redirect(url)", "def confirm(request, virtualpos_type):\n return djangovirtualpos_views.confirm_payment(request, virtualpos_type, CreditCardReference)", "def custom_actions(self, form_wizard_entry, request=None):", "def _open_form_frame(self):\n logger.debug(\"Waiting for the form frame\")\n WebDriverWait(self.driver, self.timeout).until(\n EC.visibility_of_element_located(self.locators.FORM_FRAME)\n )\n\n logger.debug(\"Open the form frame\")\n url = \"https://www.iowacourts.state.ia.us/ESAWebApp/TrialCourtStateWide\"\n self.driver.get(url)\n\n logger.debug(\"Waiting for the tabs\")\n WebDriverWait(self.driver, self.timeout).until(\n EC.visibility_of_element_located(self.locators.CASE_ID_TAB)\n )", "def click_login_button(self):", "def hide_invoice_order_button(self):\n invoice_order_action = self.env.ref('sale.action_view_sale_advance_payment_inv')\n if invoice_order_action and invoice_order_action.binding_model_id:\n invoice_order_action.binding_model_id = False", "def OnButtonRateHelpButton(self, event):\r\n\t\twebbrowser.open(consts.URL_HELP_RATE)", "def showEditContact(self):", "def activate_account(self):\n self.driver.execute_script(\"window.scrollTo(0, 1000)\")\n self.click_on_element_by_css(tep.ACTIVATION_LINK)\n self.click_on_element_by_css(tep.ACTIVATION_BUTTON)", "def click_buy_and_sell_deal_create_button(self):\n self.click_element(self.save_vendor_profile_locator)", "def onclick_open_camera_button(self):\n self.openCam.show()", "def submit(request):\n if not request.user.is_authenticated():\n return proceed(request)\n # If dev has already agreed, continue to next step.\n user = UserProfile.objects.get(pk=request.user.id)\n if not user.read_dev_agreement:\n return redirect('submit.app.terms')\n return manifest(request)", "def exposition_submit(request, pk):\n success_url = request.GET.get('next', None)\n if 'add-another' in request.POST:\n success_url = reverse('expo_submit')\n 
context = get_navbar_context()\n context.update(\n get_breadcrumbs(request.path, web_breadcrumb_dict)\n )\n\n form_kwargs = {'user':request.user}\n if pk:\n exposition = get_object_or_404(Exposition, pk=pk)\n form_kwargs.update({'instance':exposition})\n else:\n exposition = None\n \n if request.method == 'POST': # If the form has been submitted...\n form = ExpositionForm(request.POST, request.FILES, **form_kwargs)\n if form.is_valid(): # All validation rules pass\n obj = form.save()\n messages.success(\n request,\n _('The exposition has been submitted correctly.')\n )\n if success_url is not None:\n return HttpResponseRedirect(success_url)\n else:\n return HttpResponseRedirect(obj.get_absolute_url())\n \n else:\n messages.warning(request, _('Error submitting the exposition.'))\n else:\n form = ExpositionForm(**form_kwargs)\n \n context.update({\n 'object':exposition,\n 'form':form,\n 'success_url':success_url\n })\n \n return render(request, 'web/home/expo_submit.html', context)", "def process_show_form(self, request, step, form):\n pass", "def openTB4Settings(self):\n self.TB4_Window = QtWidgets.QDialog()\n self.TB4_ui = Ui_robotFourConfig()\n self.TB4_ui.setupUi(self.TB4_Window)\n self.TB4_Window.show()", "def button_fac_cob_ent(self):\n invoice = self._fac_ent()\n\n # pagar la factura\n # hacer configuracion para modificar esto\n receipt_obj = self.env['account.voucher.receiptbook']\n receipt = receipt_obj.search([('name', 'like', 'Recibos')], limit=1)\n\n journal = self.journal_id\n res = invoice.invoice_pay_customer()\n context = res['context']\n\n account_voucher_obj = self.env['account.voucher']\n voucher = account_voucher_obj.create({\n 'partner_id': context['default_partner_id'],\n 'journal_id': journal.id,\n 'account_id': journal.default_debit_account_id.id,\n 'type': context['type'],\n 'amount': context['default_amount'],\n 'net_amount': context['default_amount'],\n 'receiptbook_id': receipt.id,\n 'company_id': self.env.user.company_id.id\n })\n voucher.signal_workflow('proforma_voucher')\n\n account_move_line_obj = self.env['account.move.line']\n\n # obtener un recordser vacio\n lines2rec = account_move_line_obj.browse()\n\n # obtener las lineas a conciliar de facturas\n account_move_line = account_move_line_obj.search(\n [('document_number', '=', invoice.document_number)])\n for re in account_move_line:\n if re.account_id.reconcile:\n lines2rec += re\n\n # obtener las lineas a conciliar de pagos\n account_move_line = account_move_line_obj.search(\n [('document_number', '=', voucher.document_number)])\n for re in account_move_line:\n if re.account_id.reconcile:\n lines2rec += re\n\n period_obj = self.env['account.period']\n period = period_obj.find()\n\n # reconciliar las lineas de factura con pagos\n lines2rec.reconcile('manual',\n journal.default_debit_account_id.id, # writeoff_acc_id\n period.id, # writeoff_period_id,\n journal.id) # writeoff_journal_id)\n\n # imprime factura\n datas = {\n 'ids': invoice.ids,\n 'model': 'account.report_invoice',\n 'form': invoice.read()\n }\n return {\n 'type': 'ir.actions.report.xml',\n 'report_name': 'aeroo_report_ar_einvoice',\n 'datas': datas,\n }", "def funding_view(request, slug, id):\n company =get_object_or_404(Company,slug=slug)\n edit = validate_user_company_access_or_redirect(request,company)\n funding_reference = get_object_or_404(Funding, id=id,company=company)\n\n return render_to_response('funding_form.html', \n {'details': funding_reference,'info':funding_reference},\n context_instance=RequestContext(request))", "def 
action_goto(self):\n dialog = GoToDialog(self)\n dialog.exec()\n\n # Re-focus the main window\n self.activateWindow()", "def dispatch(self, *args, **kwargs):\r\n return super(PaymentFakeView, self).dispatch(*args, **kwargs)", "def on_ConfirmWalletOP_clicked(self):\n # TODO: not implemented yet\n raise NotImplementedError", "def click_login_button(self):\n submit_button = self.locate_element_by_css_selector(LOGIN_BUTTON_SELECTPR)\n submit_button.click()", "def OnButtonSubmitterPageButton(self, event):\r\n\t\twebbrowser.open(self._configtmp[\"imageurl\"])", "def click_submit_button(self):\n self.click(by_locator=self.__ASK_QUESTION_PAGE_ASK_QUESTION_BUTTON)", "def send_by_email(self):\r\n ir_model_data = self.env['ir.model.data']\r\n try:\r\n template_id = ir_model_data.get_object_reference(\r\n 'ng_church', 'email_template_church_pledge_report')[1]\r\n except ValueError:\r\n template_id = False\r\n try:\r\n compose_form_id = ir_model_data.get_object_reference(\r\n 'mail', 'email_compose_message_wizard_form')[1]\r\n except ValueError:\r\n compose_form_id = False\r\n ctx = dict(self._context)\r\n ctx.update({\r\n 'default_model': 'church.pledge',\r\n 'default_res_id': self._ids[0],\r\n 'default_use_template': bool(template_id),\r\n 'default_template_id': template_id,\r\n 'default_composition_mode': 'comment',\r\n })\r\n return {\r\n 'name': _('Compose Email'),\r\n 'type': 'ir.actions.act_window',\r\n 'view_type': 'form',\r\n 'view_mode': 'form',\r\n 'res_model': 'mail.compose.message',\r\n 'views': [(compose_form_id, 'form')],\r\n 'view_id': compose_form_id,\r\n 'target': 'new',\r\n 'context': ctx,\r\n }", "def onCheckout(self, controller):\n \n if askokcancel(\"Proceed\", \"Pay the order?\"):\n c = controller.customer\n package = {'customer_id':c.id, 'order_price':c.my_order.GetTotalPrice}\n msg = controller.transmit(package)\n \n if msg['order_received']:\n c.CheckOut(c.my_order.GetTotalPrice)\n c.Clear()\n controller.show_frame(PageThree)", "def input_payment_details(self):\n pass", "def web_payment(request):\n context = {\"token\": request.user.auth_token.key}\n return render(request, \"payment/payment.html\", context=context)", "def gt_helper_clicked(self):\n if not self.gt_helper_open:\n self.gt_helper_open = True\n self.gt_helper.show()", "def payment_transaction(self, acquirer_id):\n cr, uid, context = request.cr, request.uid, request.context\n payment_obj = request.registry.get('payment.acquirer')\n transaction_obj = request.registry.get('payment.transaction')\n order = request.website.sale_get_order(context=context)\n\n if not order or not order.order_line or acquirer_id is None:\n return request.redirect(\"/shop/checkout\")\n\n assert order.partner_id.id != request.website.partner_id.id\n\n # find an already existing transaction\n tx = request.website.sale_get_transaction()\n if tx:\n tx_id = tx.id\n if tx.sale_order_id.id != order.id or tx.state in ['error', 'cancel'] or tx.acquirer_id.id != acquirer_id:\n tx = False\n tx_id = False\n elif tx.state == 'draft': # button cliked but no more info -> rewrite on tx or create a new one ?\n tx.write(dict(transaction_obj.on_change_partner_id(cr, SUPERUSER_ID, None, order.partner_id.id, context=context).get('values', {}), amount=order.amount_total))\n if not tx:\n tx_id = transaction_obj.create(cr, SUPERUSER_ID, {\n 'acquirer_id': acquirer_id,\n 'type': 'form',\n 'amount': order.amount_total,\n 'currency_id': order.pricelist_id.currency_id.id,\n 'partner_id': order.partner_id.id,\n 'partner_country_id': order.partner_id.country_id.id,\n 
'reference': request.env['payment.transaction'].get_next_reference(order.name),\n 'sale_order_id': order.id,\n }, context=context)\n request.session['sale_transaction_id'] = tx_id\n tx = transaction_obj.browse(cr, SUPERUSER_ID, tx_id, context=context)\n\n # update quotation\n request.registry['sale.order'].write(\n cr, SUPERUSER_ID, [order.id], {\n 'payment_acquirer_id': acquirer_id,\n 'payment_tx_id': request.session['sale_transaction_id']\n }, context=context)\n\n return payment_obj.render(\n cr, SUPERUSER_ID, tx.acquirer_id.id,\n tx.reference,\n order.amount_total,\n order.pricelist_id.currency_id.id,\n partner_id=order.partner_shipping_id.id or order.partner_invoice_id.id,\n tx_values={\n 'return_url': '/shop/payment/validate',\n },\n context=dict(context, submit_class='btn btn-primary', submit_txt=_('Оформить')))", "def submit(self):\n self.driver.find_element(*BaseLocators.SUBMIT_BUTTON).click()", "def adv_new_window(self):\n adv=workflow.advancedoptions_w.ADialog()\n adv.exec_()", "def action_next(self, cr, uid, ids, context=None):\n survey_obj = self.pool.get('survey')\n search_obj = self.pool.get('ir.ui.view')\n if context is None: context = {}\n\n this = self.browse(cr, uid, ids, context=context)[0]\n survey_id = this.survey_id.id\n context.update({'survey_id': survey_id, 'sur_name_id': this.id})\n cr.execute('select count(id) from survey_history where user_id=%s\\\n and survey_id=%s' % (uid,survey_id))\n\n res = cr.fetchone()[0]\n sur_rec = survey_obj.browse(cr,uid,survey_id,context=context)\n if sur_rec.response_user and res >= sur_rec.response_user:\n raise osv.except_osv(_('Warning!'),_(\"You cannot give response for this survey more than %s times.\") % (sur_rec.response_user))\n\n if sur_rec.max_response_limit and sur_rec.max_response_limit <= sur_rec.tot_start_survey:\n raise osv.except_osv(_('Warning!'),_(\"You cannot give more responses. 
Please contact the author of this survey for further assistance.\"))\n\n search_id = search_obj.search(cr,uid,[('model','=','survey.question.wiz'),('name','=','Survey Search')])\n return {\n 'view_type': 'form',\n \"view_mode\": 'form',\n 'res_model': 'survey.question.wiz',\n 'type': 'ir.actions.act_window',\n 'target': 'new',\n 'search_view_id': search_id[0],\n 'context': context\n }", "def open_case_number_search_tab(self):\n # Open the frame where the form is hiding\n self._open_form_frame()\n\n # Click into the trial court search\n logger.debug(\"Clicking the case number search tab\")\n self.click(self.locators.CASE_ID_TAB)", "def make_form(self):", "def open_invoices(self):\n return {\n 'domain': \"[('id', 'in', \" + str(self.invoice_ids.ids) + \" )]\",\n 'name': 'Invoices',\n 'view_mode': 'tree,form',\n 'res_model': 'account.move',\n 'type': 'ir.actions.act_window',\n }", "def CreateAccount():\n login_frame.forget()\n self.LoadCreateAccountWindow()", "def checkout(self): \n mtool = getToolByName(self.context, \"portal_membership\")\n ICheckoutManagement(self.context).redirectToNextURL(\"AFTER_START\")", "def __fill_applicant_form(self, profile):\r\n\r\n actions = ActionChains(self.driver)\r\n actions.send_keys(profile['name'] + Keys.TAB + \\\r\n profile['email'] + Keys.TAB + \\\r\n profile['phone_number'] + Keys.TAB)\r\n actions.perform()", "def presssubmitdesign(self):\n self.mode.submitDesign(self.myDesign)", "def initiate_payment(self, order: Order) -> str:\n raise NotImplementedError", "def click_button(self):\n self.q(css='div#fixture button').first.click()", "def show_user_detail_form():\n\n return render_template(\"add-user-details.html\")", "def open(self, acct, password, blockchain, signals):\n self.setAccountHandlers(blockchain, signals)\n self.selectedAccount = self.openAccount = self.acctManager.openAccount(acct, password)\n return self", "def show_form():\n\n return render_template(\"form.html\")", "def open_ride_edit_form(self) -> object:\n self.container.find_element(*self._ride_edit).click()\n\n return EditForm(self).wait_for_component_to_be_present()", "def click_related_list_button(self, heading, button_title):\n self.load_related_list(heading)\n locator = lex_locators[\"record\"][\"related\"][\"button\"].format(\n heading, button_title\n )\n self._jsclick(locator)\n self.wait_until_modal_is_open()", "def submit_order(request, orderid):\n if request.user.is_staff:\n order = WorkingOrder.objects.get(pk=orderid)\n else:\n order = request.user.workingorder_set.get(id=orderid) \n\n if order.status != BaseOrder.Const.DEALER_EDIT:\n return HttpResponseServerError()\n \n # always submit orders in the context of proper account\n account = order.owner.get_profile().account\n \n if request.method == 'GET': \n form = SubmitForm(instance=order)\n else:\n form = SubmitForm(request.POST, instance=order)\n if form.is_valid():\n order = form.save(commit=False)\n cost = order.cost or decimal.Decimal() \n if cost > account.credit_balance:\n ## users account doesn't have enough juice.. 
send then to the ecom engine \n ## to pay, then get them back here ...\n order = form.save()\n products = [form.cleaned_data['design_product']]\n option = form.cleaned_data.get('processing_option', None)\n if option:\n products.append(option) \n new_cart(request, products)\n request.method = 'GET' \n return paypal_checkout(request, success_url=reverse('submit-order', args=[orderid]))\n else: \n register_design_order(order.owner, order.owner.get_profile().account, order, cost)\n order = form.save(commit=False)\n order.status = BaseOrder.Const.SUBMITTED\n order.submitted = datetime.now()\n order.save()\n # return HttpResponseRedirect('completed_order_summary', args=[orderid]) # TODO\n return HttpResponseRedirect(reverse('submit-order-completed', args=[order.id])) \n return dict(order=order, form=form)", "def get_absolute_url(self):\n return reverse('payment-detail', args=[str(self.id)])", "def btn_create_order_con(self):\n\t\tprint()\n\t\tprint('btn_create_order_con')\n\n\t\t# Init\n\t\t\n\t\t# Search Partner\n\t\tpartner = tre_funcs.get_partner(self, self.patient.name)\n\n\t\t# Search pricelist\n\t\tpricelist = tre_funcs.get_pricelist(self)\n\n\t\t# Search product\n\t\tname = 'CONSULTA MEDICA'\n\t\tprice_list = '2019'\n\t\tproduct = tre_funcs.get_product_product(self, name, price_list)\n\n\t\t# Check \n\t\tproduct_template = tre_funcs.get_product_template(self, name, price_list)\n\t\ttre_funcs.check_product(self, '2019', product, product_template)\n\n\t\t# Create order \n\t\torder = pl_creates.create_order_con(self, partner.id, pricelist.id, product)\n\t\t\n\t\t# Open Order\n\t\treturn action_funcs.open_order(order)", "def __add_credit_menu(self):\n log.debug(\"Displaying __add_credit_menu\")\n # Create a payment methods keyboard\n keyboard = list()\n # Add the supported payment methods to the keyboard\n # Cash\n keyboard.append([telegram.KeyboardButton(self.loc.get(\"menu_cash\"))])\n # Telegram Payments\n if self.cfg.ccard[\"credit_card_token\"] != \"\":\n keyboard.append([telegram.KeyboardButton(self.loc.get(\"menu_credit_card\"))])\n # Keyboard: go back to the previous menu\n keyboard.append([telegram.KeyboardButton(self.loc.get(\"menu_all_cancel\"))])\n # Send the keyboard to the user\n self.bot.send_message(self.chat.id, self.loc.get(\"conversation_payment_method\"),\n reply_markup=telegram.ReplyKeyboardMarkup(keyboard, one_time_keyboard=True))\n # Wait for a reply from the user\n selection = self.__wait_for_specific_message(\n [self.loc.get(\"menu_cash\"), self.loc.get(\"menu_credit_card\"), self.loc.get(\"menu_all_cancel\")],\n cancellable=True)\n # If the user has selected the Cash option...\n if selection == self.loc.get(\"menu_cash\"):\n # Go to the pay with cash function\n self.bot.send_message(self.chat.id,\n self.loc.get(\"payment_cash\", user_cash_id=self.user.identifiable_str()))\n # If the user has selected the Credit Card option...\n elif selection == self.loc.get(\"menu_credit_card\"):\n # Go to the pay with credit card function\n self.__add_credit_cc()\n # If the user has selected the Cancel option...\n elif isinstance(selection, CancelSignal):\n # Send him back to the previous menu\n return", "def show(self):\n self.Show()", "def on_preferencesButton_clicked(self):\n e5App().getObject(\"UserInterface\").showPreferences(\"translatorPage\")" ]
[ "0.61550695", "0.6056384", "0.59469336", "0.5873533", "0.5842952", "0.57555926", "0.57015985", "0.56164473", "0.559442", "0.5588485", "0.5559973", "0.55400896", "0.5487203", "0.5468848", "0.5451458", "0.541463", "0.5409489", "0.540566", "0.5403033", "0.534871", "0.5344379", "0.5337243", "0.53246", "0.53206474", "0.5285212", "0.5278911", "0.5269285", "0.5267306", "0.5246895", "0.5245221", "0.5213659", "0.5212347", "0.52068305", "0.5205542", "0.5195365", "0.51951206", "0.5194576", "0.5187591", "0.5146854", "0.51426154", "0.5134472", "0.5129511", "0.5117968", "0.5117968", "0.511156", "0.5109654", "0.5097346", "0.5091398", "0.50897074", "0.508808", "0.5086506", "0.5075252", "0.50733477", "0.5069487", "0.50673074", "0.50663084", "0.5059435", "0.5039171", "0.5036582", "0.4992335", "0.49912363", "0.4956657", "0.4954123", "0.49523556", "0.4946727", "0.49454764", "0.49375278", "0.4933034", "0.49205762", "0.49124137", "0.49091497", "0.48958507", "0.48945174", "0.48890242", "0.4884634", "0.48795733", "0.48794982", "0.48776734", "0.48768544", "0.4869258", "0.48692", "0.48653844", "0.48590133", "0.48574856", "0.48418382", "0.48356524", "0.4833552", "0.48295945", "0.48292732", "0.48260227", "0.4825045", "0.48225492", "0.481945", "0.48188773", "0.481705", "0.48123083", "0.48112574", "0.4806757", "0.48065895", "0.48048812", "0.48015502" ]
0.0
-1
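The long Django view in the record above decides whether to surface a failed recurring payment by comparing the latest failed and latest paid transaction timestamps. A minimal, self-contained sketch of that decision; the helper name and argument shapes are assumptions for illustration, not part of the dataset:

```python
def should_display_failed_transaction(last_paid, last_failed):
    """Return True when the latest failed transaction postdates any paid one.

    Both arguments are transaction-like objects exposing a ``create_dt``
    datetime, or None when no such transaction exists.
    """
    if not last_failed:
        return False
    # A failure that no later successful payment has superseded is still
    # current, so the view should flag it.
    return not last_paid or last_failed.create_dt > last_paid.create_dt
```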
This button method is used to change the tenancy state to close.
def landlord_button_close(self): return self.write({'state': 'close'})
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def onBtnCloseClicked(self):\n self.close()", "def close_UI(self):", "def _cancel(self, __button):\r\n\r\n self.assistant.destroy()", "def _cancel(self, __button):\r\n\r\n self.assistant.destroy()", "def close(self, state):\n pass", "def close(event):\n event.widget.destroy()", "def click_close_button(self):\n self.click_img(target_img=SETTINGS['img_paths']['buttons']['close'])", "def onBtnCancelClicked(self):\n self.close()", "def OnClose(self, event):\n self.Show(False)", "def close(self):\n self.parent.activate()", "def close(self):\n self.state = False\n self.mainwindow.sendMessage('a')\n print(\"closing \" + self.name)", "def cancelButton(self):\n \n self.answer=\"cancel\"\n self.top.destroy()", "def _cancel(self, __button):\r\n\r\n self.destroy()", "def on_ur_close_launch_btn_clicked(self):\n ur_type = self.ur.urtype\n # print(ur_type)\n close_roslaunch(ur_type)\n self.set_ur_info_txt(\"close launch \" + ur_type )\n # self.ur.Init_node(ur_type)\n self.set_ur_eepos_btns_bool(False)\n self.set_roslaunch_btn(False)\n self.set_ur_related_btns_bool(False)\n # self.ur_launch_btn()", "def _cancel(self, __button):\r\n\r\n self.assistant.destroy()\r\n\r\n return True", "def _cancel(self, __button):\r\n\r\n self.assistant.destroy()\r\n\r\n return True", "def closingState(self):\n pass", "def closingState(self):\n pass", "def OnClose(self, event):\n\t\tself.Show(False)", "def pressCancel(self):\n self.close()", "def click_and_close(self, button_name):\r\n self.clicked = button_name\r\n self.root.destroy()", "def yesButton(self):\n \n self.answer=\"yes\"\n self.top.destroy()", "def close(self):\n self._command = \"close\"", "def closeEvent(self, event):\n self.is_active = False\n app._in_event_loop = False\n super()", "def closeEvent(self, event):\n\n\t\tevent.ignore()\n\t\tself.hide()\n\t\tself.__sys_tray_icon.show()", "def CloseButton(self, visible=True):\r\n \r\n return self.SetFlag(self.buttonClose, visible)", "def onClose(self, event): \n \n self.Destroy()\n return", "def _cancel(self, __button=None):\r\n\r\n self.destroy()", "def handle_close(self):\n self.active = False\n self.close()", "def click_close_modal_content_button(self):\n self._basket.click_close_modal_content_button()", "def __window_close(self):\n pass", "def close(self):\n self.window.destroy()\n self.buttons_window.destroy()", "def on_closebutton_handle_clicked(self, _widget):\n self._terminate.set()\n self.top_widget.destroy()\n self.top_widget = None", "def OnClose(self, event = None):\n ##Close.\n self.Hide()\n self.Destroy()", "def on_cancel_click(self):\r\n\t\t# self.parent.show()\r\n\t\tself.close()", "def cancelButton(self):\n \n self.answer=-1\n self.top.destroy()", "def __minimize_on_click(self):\n self.close()", "def _close(self, event):\n self.EndModal(wx.ID_OK)", "def on_pushButton_only_close_clicked(self):\n # TODO: not implemented yet\n raise NotImplementedError", "def close(self):\n self.state = STATE_STOPPED", "def close(self):\n self._normal_close = True\n\n self.cancel()", "def quit_click(self):\n\n self.parent.destroy()", "def Close(self):", "def On_Exit(self):\n GUI_element.On_Exit(self)\n if self.generic_button:\n self.generic_button_text_object.Kill()", "def okButton(self):\n \n self.answer=\"ok\"\n self.top.destroy()", "def closeEvent(self, event):\n aqt.mw.reset()\n super().closeEvent(event)", "def OnClose(self, event):\n self._is_open = False\n wx.PostEvent(self, wxDockPaneClosedEvent())", "def close(self):\n\n self.en_time = time.strftime('%H:%M %A %d %B')\n self.is_active = False", "def 
close(self):\n \n return self.set_level('down')", "def onExitButtonClicked(self, widget):\n self.getGtkTopObject().close()", "def on_click_cancel(self):\n self.valueChanged.emit(False)\n self.close()", "def closeEvent(self, event):\n if self.testingFlag:\n event.accept()\n\n else:\n close, clearSettings = self.confirmCloseEvent()\n\n if close:\n self.tidyUp()\n\n if clearSettings:\n self.clearSettings()\n\n else:\n self.saveSettings()\n\n event.accept()\n\n else:\n event.ignore()", "def close_pop_up_windows(self):\n self.button_click(self.DECLINE_BUTTON)\n self.button_click(self.CLOSE_POPUP_BUTTON)", "def close(self):\n self._context.state = CLOSED", "def close(self):\n self.Close()", "def close(self):\n self.destroy()", "def on_CloseWallet_clicked(self):\n # TODO: not implemented yet\n raise NotImplementedError", "def close(self):\n self.is_open = False", "def on_close_control_com_btn_clicked(self):\n self.control1.device.close_port()\n # self.sensor1.sensor.close_port()\n self.control1.pause()\n self.open_control_com_btn.setEnabled(True)\n self.close_control_com_btn.setEnabled(False)\n self.set_rcr_btns_bool(False)\n self.set_status_txt(\"closing control board \")", "def close(self):\n self.dismiss()\n screenmanager.change_to('main_menu')", "def action_cancel(self):\n self.state = 'canceled'", "def onClose (self):\n \n pass", "def OnExit(self, event):\r\n self.Close(True)", "def close(self):\n\n\t\tself._window.close()", "def createCloseButton(self, parent):\n return Button(parent, Message.LABEL_BUTTON_CLOSE, Icon.ACTION_CLOSE, \n command=self.close)", "def back_clicked(self):\n self.close()", "def OnExit(self, event):\n self.Close(True)", "def OnExit(self, event):\n self.Close(True)", "def OnExit(self, event):\n self.Close(True)", "def OnExit(self, event):\n self.Close(True)", "def on_ur_close_core_btn_clicked(self):\n close_roscore()\n self.set_ur_info_txt(\"close ros.\")\n self.set_roscore_btn(False)", "def close(self):\n print 'close'", "def close(self):\n if not self._close_state.is_set():\n self._close_state.set()", "def close(self):\n self.tl.withdraw()\n self.lumpy.quit()", "def close(self):\n #############################################################################\n # TODO: YOUR CODE HERE #\n #############################################################################\n \n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n super().close()", "def close(self):\n # This is a NOOP by default", "def close(self):\n # This is a NOOP by default", "def _onExit(self, event):\n self.Close(True)", "def close(self):", "def close(self):", "def close(self):", "def close(self):", "def close(self):", "def close(self):", "def close(self):", "def close(self):", "def close(self):", "def close(self):", "def OnClose(self, event):\n self.OnIconize(event, True)", "def close_2(self):\n self.pop_up_amount.destroy()", "def close(self):\n self.closed = True", "def close(self):\n self.props_action.setVisible(False)\n self.cache.last_format = None\n self.cache.last_serial = None\n self.setWindowTitle(\"TCam Capture\")\n self.pixel_coords_label.setText(\"\")\n self.pixel_label.setText(\"\")\n self.current_fps_label.setText(\"\")\n\n if self.props:\n self.props.setParent(None)\n self.props = None\n self.removeDockWidget(self.props)\n\n self.set_device_menus_enabled(False)\n self.setCentralWidget(None)\n self.serial = None\n\n if self.props_widget:\n 
self.props_widget.stop()\n self.props_widget = None\n\n if self.view is not None:\n self.stop()\n self.view.setParent(None)\n self.view = None\n # update menu to remove mark on open camera\n self.update_device_list(self.device_list)", "def sgnCancel(self):\n\n self.uiCloseWindow()", "def noButton(self):\n \n self.answer=\"no\"\n self.top.destroy()", "def closeEvent(self, event):\n self.is_active = False\n app._in_event_loop = False\n # It seems that interactor closes slower than the windows preventing from properly closing the interface.\n # The work-around is to wait a little bit\n time.sleep(0.1)\n super()", "def click_Exit(self, event):\n exit()", "def close(self):\n # By default, this is a NOOP", "def close(self):\n if self.disable:\n return\n if self._pbar:\n self.close_pbar()\n super().close()", "def push_button_cancel_clicked(self) -> None:\n self._edit_pair = None\n self.close()", "def close(self):\r\n pass" ]
[ "0.7391783", "0.68851864", "0.68595886", "0.68595886", "0.6839893", "0.68002075", "0.6791322", "0.6786216", "0.67538434", "0.67446655", "0.67278045", "0.67195815", "0.66731733", "0.6658879", "0.665314", "0.665314", "0.6634717", "0.6634717", "0.6610405", "0.66013527", "0.65797085", "0.6575164", "0.65700835", "0.6566563", "0.656371", "0.65426725", "0.6532422", "0.6526446", "0.65178496", "0.650545", "0.64637405", "0.6430299", "0.64198756", "0.6407798", "0.64030236", "0.638566", "0.6372831", "0.63619214", "0.63434875", "0.631838", "0.6312664", "0.6311164", "0.6301533", "0.6288196", "0.6287602", "0.6281441", "0.62682676", "0.6261387", "0.6257681", "0.6256628", "0.6255657", "0.623826", "0.6231293", "0.62297857", "0.62258744", "0.62199634", "0.62161785", "0.62158227", "0.61993974", "0.6193659", "0.6192539", "0.6191715", "0.61832136", "0.61719006", "0.6154623", "0.6144754", "0.61379474", "0.61379474", "0.61379474", "0.61379474", "0.6133434", "0.6128394", "0.6105857", "0.61020535", "0.6097426", "0.60917854", "0.60917854", "0.6084192", "0.6082968", "0.6082968", "0.6082968", "0.6082968", "0.6082968", "0.6082968", "0.6082968", "0.6082968", "0.6082968", "0.6082968", "0.60800666", "0.6076291", "0.6073327", "0.60684806", "0.60631734", "0.60540307", "0.6052659", "0.6049732", "0.60464925", "0.6044893", "0.60446525", "0.60413057" ]
0.76498365
0
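The query/document pair above shows an Odoo button method that closes a tenancy with a single `write()`. A hypothetical minimal model illustrating how such a button method is typically wired; the model and field names here are assumptions, not taken from the dataset:

```python
from odoo import fields, models


class Tenancy(models.Model):
    _name = "example.tenancy"
    _description = "Tenancy (illustrative)"

    state = fields.Selection(
        selection=[
            ("draft", "Draft"),
            ("open", "Open"),
            ("close", "Close"),
            ("cancelled", "Cancelled"),
        ],
        default="draft",
    )

    def landlord_button_close(self):
        # write() updates every record in the recordset and returns True,
        # which is what the client expects back from a button handler.
        return self.write({"state": "close"})
```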
This button method is used to change the tenancy state to cancelled.
def landlord_button_cancel_tenancy(self): for record in self: self.write( {'state': 'cancelled', 'tenancy_cancelled': True}) rent_ids = self.env['tenancy.rent.schedule'].search( [('tenancy_id', '=', record.id), ('paid', '=', False), ('move_check', '=', False)]) for value in rent_ids: value.write({'is_readonly': True}) return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def action_cancel(self):\n self.state = 'canceled'", "def cancel(self):\n self.__canceled = True", "def cancel(self):\n self.cancelled = True", "def cancel(self):\n self.cancelled = True", "def mark_cancelled(self):\n self.status = STATUS_CANCELED", "def on_cancel(self):\n self.state = CANCELED\n self._reject()", "def cancel(self):", "def cancel(self):", "def cancel(self):", "def do_cancel(self):\r\n self.write({'cancelled': True})", "def cancel(self):\n pass", "def cancelButton(self):\n return self.__cancelButton", "def cancelButton(self):\n return self.__cancelButton", "def cancelButton(self):\n return self.__cancelButton", "def _cancel(self, __button):\r\n\r\n self.destroy()", "def cancel(self):\n self.cancelled.set()", "def _cancel(self, __button):\r\n\r\n self.assistant.destroy()", "def _cancel(self, __button):\r\n\r\n self.assistant.destroy()", "def cancel(self): #$NON-NLS-1$\r", "def cancel(self):\n self.on_cancel()", "def cancelButton(self):\n \n self.answer=\"cancel\"\n self.top.destroy()", "def canceled(self):\n return", "def _cancel(self, __button=None):\r\n\r\n self.destroy()", "def _cancel(self, __button):\r\n\r\n self.assistant.destroy()\r\n\r\n return True", "def _cancel(self, __button):\r\n\r\n self.assistant.destroy()\r\n\r\n return True", "def cancel(self):\n return self.RES_OK", "def cancel(self):\n self.succeeded = False\n self.reject()", "def cancel(self):\n self.succeeded = False\n self.reject()", "def cancel(self):\n self.succeeded = False\n self.reject()", "def cancel(self):\n self.succeeded = False\n self.reject()", "def click_cancel(self):\n self.click_element(self.cancel_button_selector)", "def cancel(self):\n self.is_active = False\n self.save()", "def cancel(self):\n self.is_active = False\n self.save()", "def cancel():", "def OnCancel(self, event):\n pass", "def OnCancel(self, event):\n pass", "def cancelButton(self):\n \n self.answer=-1\n self.top.destroy()", "def accept_cancel(self):\n self.ok = False\n self.destroy()", "def on_cancel(self, *args):\n self.response(Gtk.ResponseType.CANCEL)", "def on_cancel(self) -> None:\n pass", "def on_cancel(self) -> None:\n pass", "def canceled(self):\n self.reject()", "def buttonCancel_Clicked( self, event ):\n\t\tself.EndModal(wx.ID_CANCEL)", "def do_uncancel(self):\r\n self.write({'cancelled': False})", "def onCancelButtonClick(self, event):\n self.EndModal(wx.ID_CANCEL)\n event.Skip()", "def cancel(self):\n self.stop()\n self.make_callback('canceled')", "def aborting(self):\n \n pass", "def cancel(self):\n raise NotImplementedError(\n u\"%s: Method not implemented\", self.__class__.__name__)", "def on_click_cancel(self):\n self.valueChanged.emit(False)\n self.close()", "def cancel_dummy(self):\n if self.state != 'authorized':\n self.raise_user_error('cancel_only_authorized')\n else:\n self.state = 'cancel'\n self.save()", "def cancel(self):\n if self.__watcher is not None:\n self.__cancel_task()\n self.__clear_running_state()\n self.setStatusMessage(\"Cancelled\")\n self.summary_text.setText(\n \"<div>Cancelled<br/><small>Press 'Reload' to try again</small></div>\"\n )", "def cancel(self):\n with self.handle_alert(confirm=False):\n self.q(css='button#confirm').first.click()", "def cancel_callback(self):\n pass", "def push_button_cancel_clicked(self) -> None:\n self._edit_pair = None\n self.close()", "def is_canceled(self):\n return self.type_id == STATE_CANCELED", "async def cancel(self, ctx):\n\n return", "def _onButtonCancelClick(self, widget):\n self.delete()", "def presscancel(self):\n self.mode.resetMode()", 
"def onBtnCancelClicked(self):\n self.close()", "def CallTipCancel(self):\n if self.CallTipActive():\n super(EditraBaseStc, self).CallTipCancel()", "def pressCancel(self):\n self.close()", "def sgnCancel(self):\n\n self.uiCloseWindow()", "def cancel(self):\n GameLoop.getInstance()._cancelation_token = True", "def isCancelled(self): #$NON-NLS-1$\r", "def nd_cancel_clicked(self, widget, data=None):\n self.new_chooser.hide()", "def cancel_brew():\r\n brew_cancelled = False\r\n #Catches batch number entered by user\r\n cancel_batch = request.args.get(\"cancel_number\")\r\n #Searches for list with the batch number entered by the user\r\n for inner_list in current_brewings:\r\n if inner_list[0] == cancel_batch:\r\n current_brewings.remove(inner_list)\r\n cancel_tank = inner_list[5]\r\n brewer_tanks[cancel_tank][\"Batch_Content\"] = \"Nothing\"\r\n brewer_tanks[cancel_tank][\"Activity_Status\"] = \"Nothing\"\r\n brew_cancelled = True\r\n\r\n if brew_cancelled:\r\n cancel_message = \"Brew has been Cancelled\"\r\n else:\r\n cancel_message = (\"This batch number does not exist\" +\r\n \" Brew has not been cancelled\")\r\n\r\n return render_template(\"singular_message.html\",\r\n user_display=cancel_message)", "def cancel(self):\n if not self.is_cancelled:\n self.will_change_value_for('is_cancelled')\n self.cancelled = True\n # remove our dependencies so that we're ready, properly behaved operations\n # will honor the cancel flag\n self.dependencies.clear()\n self.did_change_value_for('is_cancelled')\n \n if not self.is_executing and not self.is_finished:\n with self.changing('is_finished'):\n self.finished = True", "def canceled(self):\n with self._done_condition:\n return self._state == CANCELLED", "def cancel_inner():\n kernel32.SetEvent(cancel_event)", "def on_cancel(self):\n self.quit()", "def on_cancel(self):\n self.quit()", "def on_cancel(self):\n self.quit()", "def on_cancel(self):\n self.quit()", "def on_buttonBox_rejected(self):\n self.reject()", "def reset_button_cancel_handler(obj_response):\n obj_response.html('#message', 'Reset Canceled')\n obj_response.html('#message_table', '')\n obj_response.attr('#message_table', 'style', 'display:none')", "def cancel(self, _=None):\r\n\r\n self.parent.focus_set()\r\n self.temp.set(False) # set boolean variable temp equal to False\r\n self.destroy()", "def setAbortState(*args):\n args[0].Controls.AbortState.abort_state = args[1]", "def alert_cancel(self):\n self._alert_accept_cancel(False)", "def _order_cancel(self, bo):\n log.info(\"bo_blotter: order_cancel bracket order bo#%s\" % bo.ticket) \n cancelled = bo.cancel()\n return(cancelled)", "def cancel(self):\n\n query = f\"scancel {self.jobid}\"\n if self.cluster:\n query = f\"scancel {self.jobid} --clusters={self.cluster}\"\n\n cmd = BuildTestCommand(query)\n cmd.execute()\n logger.debug(f\"Cancelling Job: {self.jobid} by running: {query}\")\n\n self.poll()\n self._state = \"CANCELLED\"", "def _clicked_no_button(self):\n self.yes = False", "def cancel_operation(self):\n # <><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>\n self.proceed = False\n self.entry_view.destroy()", "def cancel(bot, update):\n bot.sendMessage(chat_id=update.message.chat_id, text=\"As you wish, the operation has been cancelled! 
😊\")\n return ConversationHandler.END", "def cancel(self):\n\n self.end()\n super().cancel()", "def cancel(self):\n self.session.rollback()", "def canCancel(self) -> bool:\n ...", "def canCancel(self) -> bool:\n ...", "def canCancel(self) -> bool:\n ...", "def canCancel(self) -> bool:\n ...", "def on_cancel(self, _event):\n self.Destroy()\n if self.changes_made:\n self.SetReturnCode(mg.RET_CHANGED_DESIGN)\n else:\n self.SetReturnCode(wx.ID_CANCEL)", "def _cancel(self):\n client = SBusClient(self.storlet_pipe_path)\n try:\n resp = client.cancel(self.task_id)\n if not resp.status:\n raise StorletRuntimeException('Failed to cancel task')\n except SBusClientException:\n raise StorletRuntimeException('Failed to cancel task')", "def click_upload_cancel_button(self):\n self.click_element(self.upload_cancel_button_locator, script_executor=True)", "def is_cancelled(self):\n\n return self._state == \"CANCELLED\"", "def cancel(self, cr, uid, ids, notes='', context=None):\n notes = \"\"\n u = self.browse(cr, uid, ids)[0].user_id.name\n notes = notes +'\\n'+'vehicle Cancelled at : '+time.strftime('%Y-%m-%d') + ' by '+ u \n self.write(cr, uid, ids, {'state':'cancel','notes':notes})\n return True", "def handleCancel(self):\n self.isTerminated = True\n self.terminate()", "def OnButtonAboutCancelButton(self, event):\r\n\t\tself.Hide()", "def control_cancel(self, wait_for_ready: bool = True) -> None:\n self.__logger.debug('Eva.control_cancel called')\n return self.__http_client.control_cancel(wait_for_ready=wait_for_ready)", "def cancel(self):\r\n self.require_item()\r\n\r\n url = '{0}/cancel'.format(self.get_url())\r\n request = http.Request('PUT', url)\r\n request.use_xml = False\r\n\r\n return request, parsers.parse_empty", "def set_status_update_waiter_cancelled(self):\n self.set_state(CHANNEL_MOVE_STATE_CANCELLED)\n self.set_status_update_waiter()", "def is_canceled(self):\n\n if self.status == self.STATUS['CANCELED']:\n return True\n else:\n return False" ]
[ "0.8116585", "0.7312937", "0.73086035", "0.73086035", "0.72557855", "0.717988", "0.7164008", "0.7164008", "0.7164008", "0.715947", "0.7138639", "0.71121854", "0.71121854", "0.71121854", "0.71017605", "0.7070211", "0.7022063", "0.7022063", "0.70071477", "0.69725823", "0.6963773", "0.69266534", "0.6898009", "0.6877686", "0.6877686", "0.68616545", "0.6858884", "0.6858884", "0.6858884", "0.6858884", "0.6813692", "0.6805661", "0.6805661", "0.677077", "0.67223537", "0.67223537", "0.6717971", "0.6627431", "0.66092473", "0.6559512", "0.6559512", "0.6559246", "0.65175444", "0.6465957", "0.6448473", "0.63931566", "0.6377348", "0.63633174", "0.63576496", "0.63544685", "0.63484013", "0.63343793", "0.6324332", "0.6318801", "0.630323", "0.62948054", "0.62910444", "0.6290057", "0.62863874", "0.6284293", "0.6263384", "0.62575036", "0.62437165", "0.6243061", "0.62355876", "0.62292784", "0.6217601", "0.6211137", "0.62017953", "0.6186072", "0.6186072", "0.6186072", "0.6186072", "0.6185205", "0.616843", "0.61580026", "0.6142042", "0.6124499", "0.6124137", "0.6122358", "0.61222935", "0.61183476", "0.6117514", "0.60944754", "0.6091274", "0.60779285", "0.60779285", "0.60779285", "0.60779285", "0.604656", "0.60295486", "0.5999345", "0.59912133", "0.59843117", "0.59773433", "0.5975785", "0.59704876", "0.5966655", "0.5966381", "0.5959413" ]
0.6651149
37
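The cancel variant above extends the same pattern: flip the state, set a flag, and lock every rent-schedule line that is still unpaid and unposted. An illustrative sketch building on the hypothetical model above; the rent-schedule model and its fields are likewise assumed:

```python
from odoo import fields, models


class TenancyCancel(models.Model):
    _inherit = "example.tenancy"

    tenancy_cancelled = fields.Boolean(default=False)

    def landlord_button_cancel_tenancy(self):
        for record in self:
            record.write({"state": "cancelled", "tenancy_cancelled": True})
            # Freeze rent lines that are neither paid nor posted. The
            # "example.rent.schedule" model is assumed to exist with these
            # fields, mirroring the record above.
            unpaid = self.env["example.rent.schedule"].search([
                ("tenancy_id", "=", record.id),
                ("paid", "=", False),
                ("move_check", "=", False),
            ])
            unpaid.write({"is_readonly": True})
        return True
```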
Create an invoice for a rent schedule.
def create_landlord_invoice(self): if self.tenancy_id.is_landlord_rent: account_jrnl_obj = self.env['account.journal'].search( [('type', '=', 'purchase')], limit=1) inv_lines_values = { # 'origin': 'tenancy.rent.schedule', 'name': 'Rent Cost for' + self.tenancy_id.name, 'quantity': 1, 'price_unit': self.amount or 0.00, 'account_id': self.tenancy_id.property_id.account_depreciation_expense_id.id or False, 'analytic_account_id': self.tenancy_id.id or False, } owner_rec = self.tenancy_id.property_owner_id invo_values = { 'partner_id': self.tenancy_id.property_owner_id.id or False, 'type': 'in_invoice', 'invoice_line_ids': [(0, 0, inv_lines_values)], 'property_id': self.tenancy_id.property_id.id or False, 'invoice_date': self.start_date or False, # 'account_id': owner_rec.property_account_payable_id.id, # 'schedule_id': self.id, 'new_tenancy_id': self.tenancy_id.id, 'journal_id': account_jrnl_obj.id or False } acc_id = self.env['account.move'].with_context({'default_type': 'in_invoice'}).create(invo_values) self.write({'invc_id': acc_id.id, 'inv': True}) wiz_form_id = self.env['ir.model.data'].get_object_reference( 'account', 'view_move_form')[1] return { 'view_type': 'form', 'view_id': wiz_form_id, 'view_mode': 'form', 'res_model': 'account.move', 'res_id': self.invc_id.id, 'type': 'ir.actions.act_window', 'target': 'current', 'context': self._context, }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
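The document above builds a vendor bill (`type='in_invoice'`), where the `(0, 0, vals)` command creates the invoice line inline in the same `create()` call. A condensed sketch of the values dict it assembles; names mirror the record but are not guaranteed Odoo APIs across versions:

```python
def _landlord_invoice_values(schedule):
    """Build the account.move values used by create_landlord_invoice().

    ``schedule`` stands in for a tenancy.rent.schedule record; the
    (0, 0, vals) triple tells the ORM to create the line on the fly.
    """
    line_vals = {
        "name": "Rent Cost for %s" % schedule.tenancy_id.name,
        "quantity": 1,
        "price_unit": schedule.amount or 0.0,
    }
    return {
        "partner_id": schedule.tenancy_id.property_owner_id.id,
        "type": "in_invoice",
        "invoice_date": schedule.start_date,
        "invoice_line_ids": [(0, 0, line_vals)],
    }
```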
[ "def _create_invoice(self):\n self.ensure_one()\n partner = self.member_id.partner_id\n invoice = self.env['account.invoice'].create({\n 'partner_id': partner.id,\n 'account_id': partner.property_account_receivable_id.id,\n 'fiscal_position_id': partner.property_account_position_id.id\n })\n for line in self.line_ids:\n product = line.activity_id.product_id\n # Handling of invoice lines : needs cache record for onchange, then\n # real writing...\n invoice_line = self.env['account.invoice.line'].new({\n 'product_id': product.id,\n 'invoice_id': invoice.id\n })\n invoice_line._onchange_product_id()\n line_values = dict(invoice_line._cache)\n line_values['price_unit'] = line.price\n invoice_line = self.env['account.invoice.line'].create(line_values)\n invoice.compute_taxes()\n line.registration_id.invoice_line_id = invoice_line.id\n return invoice", "def action_create_invoice(self):\n if self.partner_id:\n supplier = self.partner_id\n else:\n supplier = self.partner_id.search(\n [(\"name\", \"=\", \"Salon Default Customer\")])\n lines = []\n product_id = self.env['product.product'].search(\n [(\"name\", \"=\", \"Salon Service\")])\n for records in self.order_line_ids:\n if product_id.property_account_income_id.id:\n income_account = product_id.property_account_income_id.id\n elif product_id.categ_id.property_account_income_categ_id.id:\n income_account = product_id.categ_id.\\\n property_account_income_categ_id.id\n else:\n raise UserError(\n _(\"Please define income account for this product: \"\n \"'%s' (id:%d).\") % (product_id.name, product_id.id))\n value = (0, 0, {\n 'name': records.service_id.name,\n 'account_id': income_account,\n 'price_unit': records.price,\n 'quantity': 1,\n 'product_id': product_id.id,\n })\n lines.append(value)\n invoice_line = {\n 'move_type': 'out_invoice',\n 'partner_id': supplier.id,\n 'invoice_user_id': self.env.user.id,\n 'invoice_origin': self.name,\n 'invoice_line_ids': lines,\n }\n inv = self.env['account.move'].create(invoice_line)\n action = self.env.ref('account.action_move_out_invoice_type',\n raise_if_not_found=False)\n result = {\n 'name': action.name,\n 'type': 'ir.actions.act_window',\n 'views': [[False, 'form']],\n 'target': 'current',\n 'res_id': inv.id,\n 'res_model': 'account.move',\n }\n self.inv_stage_identifier = True\n self.stage_id = 3\n invoiced_records = self.env['salon.order'].search(\n [('stage_id', 'in', [3, 4]), ('chair_id', '=', self.chair_id.id)])\n total = 0\n for rows in invoiced_records:\n invoiced_date = str(rows.date)\n invoiced_date = invoiced_date[0:10]\n if invoiced_date == str(date.today()):\n total = total + rows.price_subtotal\n self.chair_id.collection_today = total\n self.update_number_of_orders()\n return result", "def make_invoices(self):\n for invoice in self.policy.invoices:\n db.session.delete(invoice)\n db.session.commit()\n\n billing_schedules = {'Annual': None, 'Semi-Annual': 3, 'Quarterly': 4, 'Monthly': 12}\n\n invoices = []\n first_invoice = Invoice(self.policy.id,\n self.policy.effective_date, # bill_date\n self.policy.effective_date + relativedelta(months=1), # due\n self.policy.effective_date + relativedelta(months=1, days=14), # cancel\n self.policy.annual_premium)\n invoices.append(first_invoice)\n\n if self.policy.billing_schedule == \"Annual\":\n pass\n elif self.policy.billing_schedule == \"Two-Pay\":\n first_invoice.amount_due = first_invoice.amount_due / billing_schedules.get(self.policy.billing_schedule)\n for i in range(1, billing_schedules.get(self.policy.billing_schedule)):\n 
months_after_eff_date = i*6\n bill_date = self.policy.effective_date + relativedelta(months=months_after_eff_date)\n invoice = Invoice(self.policy.id,\n bill_date,\n bill_date + relativedelta(months=1),\n bill_date + relativedelta(months=1, days=14),\n self.policy.annual_premium / billing_schedules.get(self.policy.billing_schedule))\n invoices.append(invoice)\n elif self.policy.billing_schedule == \"Quarterly\":\n first_invoice.amount_due = first_invoice.amount_due / billing_schedules.get(self.policy.billing_schedule)\n for i in range(1, billing_schedules.get(self.policy.billing_schedule)):\n months_after_eff_date = i*3\n bill_date = self.policy.effective_date + relativedelta(months=months_after_eff_date)\n invoice = Invoice(self.policy.id,\n bill_date,\n bill_date + relativedelta(months=1),\n bill_date + relativedelta(months=1, days=14),\n self.policy.annual_premium / billing_schedules.get(self.policy.billing_schedule))\n invoices.append(invoice)\n elif self.policy.billing_schedule == \"Monthly\":\n first_invoice.amount_due = first_invoice.amount_due / billing_schedules.get(self.policy.billing_schedule)\n for i in range(1, billing_schedules.get(self.policy.billing_schedule)):\n months_after_eff_date = i\n bill_date = self.policy.effective_date + relativedelta(months=months_after_eff_date)\n invoice = Invoice(self.policy.id,\n bill_date,\n bill_date + relativedelta(months=1),\n bill_date + relativedelta(months=1, days=14),\n self.policy.annual_premium / billing_schedules.get(self.policy.billing_schedule))\n invoices.append(invoice)\n else:\n print \"You have chosen a bad billing schedule.\"\n\n logger.info(str(len(invoices)) + \" invoices generated for policy %s\" % self.policy.id)\n\n for invoice in invoices:\n db.session.add(invoice)\n db.session.commit()", "def create_invoice(self):\n for line in self:\n # if not line.account_id:\n # raise UserError(_('Please Add the incoming Account !!'))\n self.ensure_one()\n journal_id = self.env['account.journal'].search([\n ('type', '=', 'sale')], limit=1)\n inv_line_main = {\n 'name': line.description.name,\n 'price_unit': line.amount or 0.00,\n 'quantity': 1,\n 'discount': line.discount,\n 'account_id': line.description.property_account_income_id.id or line.description.categ_id.property_account_income_categ_id.id or False,\n }\n inv_values = {\n 'partner_id': line.patient_id.partner_id.id,\n 'patient_id': line.patient_id.id,\n 'dentist': line.dentist.id,\n 'move_type': 'out_invoice',\n 'invoice_date': datetime.now().strftime(DF) or False,\n 'journal_id': journal_id and journal_id.id or False,\n 'teeth_id': line.patient_id and line.patient_id.id or False,\n }\n acc_id = self.env['account.move'].create(inv_values)\n acc_id.write({'invoice_line_ids': [(0, 0, inv_line_main)]})\n\n self.write({'invc_id': acc_id.id, 'inv': True})\n context = dict(self._context or {})\n wiz_form_id = self.env['ir.model.data'].get_object_reference(\n 'account', 'view_move_form')[1]\n\n return {\n 'view_type': 'form',\n 'view_id': wiz_form_id,\n 'view_mode': 'form',\n 'res_model': 'account.move',\n 'res_id': self.invc_id.id,\n 'type': 'ir.actions.act_window',\n 'target': 'current',\n 'context': context,\n }", "def action_invoice_create(self, cr, uid, ids, context=None):\n res = False\n\n journal_obj = self.pool.get('account.journal')\n inv_obj = self.pool.get('account.invoice')\n inv_line_obj = self.pool.get('account.invoice.line')\n fiscal_obj = self.pool.get('account.fiscal.position')\n\n for order in self.browse(cr, uid, ids, context=context):\n# pay_acc_id = 
order.partner_id.property_account_payable.id\n #use a new method to get the account_id\n pay_acc_id = self._get_inv_pay_acc_id(cr,uid,order) \n journal_ids = journal_obj.search(cr, uid, [('type', '=','purchase'),('company_id', '=', order.company_id.id)], limit=1)\n if not journal_ids:\n raise osv.except_osv(_('Error!'),\n _('Define purchase journal for this company: \"%s\" (id:%d).') % (order.company_id.name, order.company_id.id))\n\n # generate invoice line correspond to PO line and link that to created invoice (inv_id) and PO line\n inv_lines = []\n for po_line in order.order_line:\n #check if this line have quantity to generate invoice, by johnw\n if po_line.product_qty <= po_line.invoice_qty:\n continue \n# if po_line.product_id:\n# acc_id = po_line.product_id.property_account_expense.id\n# if not acc_id:\n# acc_id = po_line.product_id.categ_id.property_account_expense_categ.id\n# if not acc_id:\n# raise osv.except_osv(_('Error!'), _('Define expense account for this company: \"%s\" (id:%d).') % (po_line.product_id.name, po_line.product_id.id,))\n# else:\n# acc_id = property_obj.get(cr, uid, 'property_account_expense_categ', 'product.category').id \n #use a new method to get the account_id, by johnw \n acc_id = self._get_inv_line_exp_acc_id(cr,uid,order,po_line)\n fpos = order.fiscal_position or False\n acc_id = fiscal_obj.map_account(cr, uid, fpos, acc_id)\n\n inv_line_data = self._prepare_inv_line(cr, uid, acc_id, po_line, context=context)\n #update the quantity to the quantity, by johnw\n inv_line_data.update({'quantity':(po_line.product_qty - po_line.invoice_qty)})\n inv_line_id = inv_line_obj.create(cr, uid, inv_line_data, context=context)\n inv_lines.append(inv_line_id)\n\n po_line.write({'invoiced':True, 'invoice_lines': [(4, inv_line_id)]}, context=context)\n \n #if no lines then return direct, by johnw\n if len(inv_lines) == 0:\n continue\n \n # get invoice data and create invoice\n inv_data = {\n 'name': order.partner_ref or order.name,\n 'reference': order.partner_ref or order.name,\n 'account_id': pay_acc_id,\n 'type': 'in_invoice',\n 'partner_id': order.partner_id.id,\n 'currency_id': order.pricelist_id.currency_id.id,\n 'journal_id': len(journal_ids) and journal_ids[0] or False,\n 'invoice_line': [(6, 0, inv_lines)],\n 'origin': order.name,\n 'fiscal_position': order.fiscal_position.id or False,\n 'payment_term': order.payment_term_id.id or False,\n 'company_id': order.company_id.id,\n }\n inv_id = inv_obj.create(cr, uid, inv_data, context=context)\n\n # compute the invoice\n inv_obj.button_compute(cr, uid, [inv_id], context=context, set_total=True)\n\n # Link this new invoice to related purchase order\n order.write({'invoice_ids': [(4, inv_id)]}, context=context)\n res = inv_id\n return res", "def create_invoice(self):\n sales_tax = 0.06\n item_sum = 0\n inv = f'Invoice#: {self.invoice_id}\\n'\n for key, value in self.items_with_price.items():\n item_sum += value\n inv += f'{key}.....${value:.2f}\\n'\n\n tax = item_sum * sales_tax\n inv += f'Tax.....${tax:.2f}\\n'\n inv += f'Total.....${tax + item_sum:.2f}'\n # print(inv)\n # returning for unit testing purposes\n return inv", "def action_invoice_create(self, cr, uid, ids, grouped=False, states=None, date_invoice=False, context=None):\n order = self.browse(cr, uid, ids[0], context=context)\n inv_obj = self.pool.get('account.invoice')\n # create the invoice\n inv_id = super(sale_order, self).action_invoice_create(cr, uid, ids, grouped, states, date_invoice, context=context)\n # modify the invoice\n inv_obj.write(cr, uid, 
[inv_id], {'past_doc': order.past_doc})\n return inv_id", "def create_invoice(invoice: Invoice, callback_url: Optional[HttpUrl] = None):\n # Send the invoice, collect the money, send the notification (the callback)\n return {\"msg\": \"Invoice received\"}", "def create_invoice(sender, invoice, issuer_details, **kwargs):\n if not invoice.items:\n return\n\n price = sum([item.price for item in invoice.items.all()])\n\n if not price:\n return\n\n paypal_invoice = models.Invoice(\n customer=invoice.customer,\n year=invoice.year,\n month=invoice.month,\n invoice_date=invoice.invoice_date,\n end_date=invoice.due_date,\n tax_percent=invoice.tax_percent,\n issuer_details=issuer_details,\n )\n\n paypal_invoice.payment_details = {\n 'name': invoice.customer.name,\n 'address': invoice.customer.address,\n 'country': invoice.customer.country,\n 'country_name': invoice.customer.get_country_display(),\n 'email': invoice.customer.email,\n 'postal': invoice.customer.postal,\n 'phone_number': invoice.customer.phone_number,\n 'bank_name': invoice.customer.bank_name,\n 'bank_account': invoice.customer.bank_account,\n }\n\n paypal_invoice.save()\n\n for item in invoice.items.all():\n models.InvoiceItem.objects.create(\n invoice=paypal_invoice,\n price=item.price,\n tax=item.tax,\n quantity=item.quantity,\n unit_price=item.unit_price,\n unit_of_measure=helpers.convert_unit_of_measure(item.unit),\n name=item.name,\n start=item.start,\n end=item.end,\n )", "def _create_payments(self, invoice):\n self.ensure_one()\n if self.schedule_id and self.schedule_id.occurences > 0:\n # TODO: make more intelligent price cut\n amount = invoice.amount_total\n amount_per_occurence = amount / self.schedule_id.occurences\n for day in self.schedule_id.day_ids:\n payment = self.env['account.payment'].new({\n 'payment_type': 'inbound',\n 'partner_type': 'customer',\n 'partner_id': self.member_id.partner_id.id,\n 'amount': amount_per_occurence,\n 'payment_date': day.day,\n 'journal_id': self.journal_id.id,\n })\n payment._onchange_journal()\n payment_values = dict(payment._cache)\n payment = self.env['account.payment'].create(payment_values)\n payment.invoice_ids = [(4, invoice.id, False)]", "def action_invoice_create(self, grouped=False, final=False):\n inv_obj = self.env['account.invoice']\n precision = self.env['decimal.precision'].sudo().precision_get('Product Unit of Measure')\n invoices = {}\n references = {}\n for order in self:\n group_key = order.id if grouped else (order.partner_invoice_id.id, order.currency_id.id)\n for line in order.order_line.sorted(key=lambda l: l.qty_to_invoice < 0):\n if float_is_zero(line.qty_to_invoice, precision_digits=precision):\n continue\n if group_key not in invoices:\n inv_data = order._prepare_invoice()\n invoice = inv_obj.sudo().create(inv_data)\n references[invoice] = order\n invoices[group_key] = invoice\n invoice['sale_order_id'] = order.id\n elif group_key in invoices:\n vals = {}\n if order.name not in invoices[group_key].origin.split(', '):\n vals['origin'] = invoices[group_key].origin + ', ' + order.name\n if order.client_order_ref and order.client_order_ref not in invoices[group_key].name.split(\n ', ') and order.client_order_ref != invoices[group_key].name:\n vals['name'] = invoices[group_key].name + ', ' + order.client_order_ref\n invoices[group_key].sudo().write(vals)\n if line.qty_to_invoice > 0:\n line.invoice_line_create(invoices[group_key].id, line.qty_to_invoice)\n elif line.qty_to_invoice < 0 and final:\n line.invoice_line_create(invoices[group_key].id, 
line.qty_to_invoice)\n\n if references.get(invoices.get(group_key)):\n if order not in references[invoices[group_key]]:\n references[invoices[group_key]] |= order\n if not invoices:\n raise UserError(_('There is no invoiceable line.'))\n for invoice in invoices.values():\n if not invoice.invoice_line_ids:\n raise UserError(_('There is no invoiceable line.'))\n # If invoice is negative, do a refund invoice instead\n if invoice.amount_untaxed < 0:\n invoice.type = 'out_refund'\n for line in invoice.invoice_line_ids:\n line.quantity = -line.quantity\n # Use additional field helper function (for account extensions)\n for line in invoice.invoice_line_ids:\n line._set_additional_fields(invoice)\n # Necessary to force computation of taxes. In account_invoice, they are triggered\n # by onchanges, which are not triggered when doing a create.\n invoice.compute_taxes()\n invoice.message_post_with_view('mail.message_origin_link',\n values={'self': invoice, 'origin': references[invoice]},\n subtype_id=self.env.ref('mail.mt_note').id)\n return [inv.id for inv in invoices.values()]", "def invoice(self, start, end):\n\n if self.invoice_type is None:\n invoice_type = self.conn.config[\"main\"][\"invoice:object\"]\n if \":\" not in invoice_type:\n raise AttributeError(\"Invoice configuration incorrect! %s\" % invoice_type)\n module, call = invoice_type.split(\":\")\n _package = __import__(module, globals(), locals(), [ call ])\n\n funct = getattr(_package, call)\n self.invoice_type = funct\n config = self.conn.config[\"invoice_object\"]\n invoice = self.invoice_type(self, config)\n return invoice", "def action_invoice_create(self, cr, uid, ids, context=None):\n res = False\n\n journal_obj = self.pool.get('account.journal')\n inv_obj = self.pool.get('account.invoice')\n inv_line_obj = self.pool.get('account.invoice.line')\n fiscal_obj = self.pool.get('account.fiscal.position')\n property_obj = self.pool.get('ir.property')\n\n for order in self.browse(cr, uid, ids, context=context):\n pay_acc_id = order.partner_id.property_account_payable.id\n journal_ids = journal_obj.search(cr, uid, [('type', '=','purchase'),('company_id', '=', order.company_id.id)], limit=1)\n if not journal_ids:\n raise osv.except_osv(_('Error !'),\n _('There is no purchase journal defined for this company: \"%s\" (id:%d)') % (order.company_id.name, order.company_id.id))\n\n # generate invoice line correspond to PO line and link that to created invoice (inv_id) and PO line\n inv_lines = []\n for po_line in order.order_line:\n if po_line.product_id:\n acc_id = po_line.product_id.product_tmpl_id.property_account_expense.id\n if not acc_id:\n acc_id = po_line.product_id.categ_id.property_account_expense_categ.id\n if not acc_id:\n raise osv.except_osv(_('Error !'), _('There is no expense account defined for this product: \"%s\" (id:%d)') % (po_line.product_id.name, po_line.product_id.id,))\n else:\n acc_id = property_obj.get(cr, uid, 'property_account_expense_categ', 'product.category').id\n fpos = order.fiscal_position or False\n acc_id = fiscal_obj.map_account(cr, uid, fpos, acc_id)\n\n inv_line_data = self._prepare_inv_line(cr, uid, acc_id, po_line, context=context)\n inv_line_id = inv_line_obj.create(cr, uid, inv_line_data, context=context)\n inv_lines.append(inv_line_id)\n\n po_line.write({'invoiced':True, 'invoice_lines': [(4, inv_line_id)]}, context=context)\n\n # get invoice data and create invoice\n inv_data = {\n 'name': order.partner_ref or order.name,\n 'reference': order.partner_ref or order.name,\n 'account_id': pay_acc_id,\n 
'type': 'in_invoice',\n 'partner_id': order.partner_id.id,\n 'currency_id': order.pricelist_id.currency_id.id,\n 'address_invoice_id': order.partner_address_id.id,\n 'address_contact_id': order.partner_address_id.id,\n 'journal_id': len(journal_ids) and journal_ids[0] or False,\n 'invoice_line': [(6, 0, inv_lines)], \n 'origin': order.name,\n 'fiscal_position': order.fiscal_position.id or order.partner_id.property_account_position.id,\n 'payment_term': order.partner_id.property_payment_term and order.partner_id.property_payment_term.id or False,\n 'company_id': order.company_id.id,\n 'add_disc': order.add_disc or 0.0\n }\n inv_id = inv_obj.create(cr, uid, inv_data, context=context)\n\n # compute the invoice\n inv_obj.button_compute(cr, uid, [inv_id], context=context, set_total=True)\n\n # Link this new invoice to related purchase order\n order.write({'invoice_ids': [(4, inv_id)]}, context=context)\n res = inv_id\n return res", "def action_invoice_create(self, grouped=False, final=False):\n if self.invoice_option == 'before_delivery':\n inv_obj = self.env['account.invoice']\n for order in self:\n inv_data = order._prepare_invoice()\n invoice = inv_obj.create(inv_data)\n for inv_line in order.order_line:\n inv_line.invoice_line_create(invoice.id, inv_line.product_uom_qty)\n\n else:\n inv_obj = self.env['account.invoice']\n precision = self.env['decimal.precision'].precision_get('Product Unit of Measure')\n invoices = {}\n references = {}\n invoices_origin = {}\n invoices_name = {}\n\n # Keep track of the sequences of the lines\n # To keep lines under their section\n inv_line_sequence = 0\n for order in self:\n group_key = order.id if grouped else (order.partner_invoice_id.id, order.currency_id.id)\n\n # We only want to create sections that have at least one invoiceable line\n pending_section = None\n\n # Create lines in batch to avoid performance problems\n line_vals_list = []\n # sequence is the natural order of order_lines\n for line in order.order_line:\n if line.display_type == 'line_section':\n pending_section = line\n continue\n if float_is_zero(line.qty_to_invoice, precision_digits=precision):\n continue\n if group_key not in invoices:\n inv_data = order._prepare_invoice()\n invoice = inv_obj.create(inv_data)\n references[invoice] = order\n invoices[group_key] = invoice\n invoices_origin[group_key] = [invoice.origin]\n invoices_name[group_key] = [invoice.name]\n elif group_key in invoices:\n if order.name not in invoices_origin[group_key]:\n invoices_origin[group_key].append(order.name)\n if order.client_order_ref and order.client_order_ref not in invoices_name[group_key]:\n invoices_name[group_key].append(order.client_order_ref)\n\n if line.qty_to_invoice > 0 or (line.qty_to_invoice < 0 and final):\n if pending_section:\n section_invoice = pending_section.invoice_line_create_vals(\n invoices[group_key].id,\n pending_section.qty_to_invoice\n )\n inv_line_sequence += 1\n section_invoice[0]['sequence'] = inv_line_sequence\n line_vals_list.extend(section_invoice)\n pending_section = None\n\n inv_line_sequence += 1\n inv_line = line.invoice_line_create_vals(\n invoices[group_key].id, line.qty_to_invoice\n )\n inv_line[0]['sequence'] = inv_line_sequence\n line_vals_list.extend(inv_line)\n\n if references.get(invoices.get(group_key)):\n if order not in references[invoices[group_key]]:\n references[invoices[group_key]] |= order\n\n self.env['account.invoice.line'].create(line_vals_list)\n\n for group_key in invoices:\n invoices[group_key].write({'name': ', '.join(invoices_name[group_key]),\n 
'origin': ', '.join(invoices_origin[group_key])})\n sale_orders = references[invoices[group_key]]\n if len(sale_orders) == 1:\n invoices[group_key].reference = sale_orders.reference\n\n if not invoices:\n raise UserError(_(\n 'There is no invoiceable line. If a product has a Delivered quantities invoicing policy, please make sure that a quantity has been delivered.'))\n\n for invoice in invoices.values():\n invoice.compute_taxes()\n if not invoice.invoice_line_ids:\n raise UserError(_(\n 'There is no invoiceable line. If a product has a Delivered quantities invoicing policy, please make sure that a quantity has been delivered.'))\n # If invoice is negative, do a refund invoice instead\n if invoice.amount_total < 0:\n invoice.type = 'out_refund'\n for line in invoice.invoice_line_ids:\n line.quantity = -line.quantity\n # Use additional field helper function (for account extensions)\n for line in invoice.invoice_line_ids:\n line._set_additional_fields(invoice)\n # Necessary to force computation of taxes. In account_invoice, they are triggered\n # by onchanges, which are not triggered when doing a create.\n invoice.compute_taxes()\n # Idem for partner\n so_payment_term_id = invoice.payment_term_id.id\n fp_invoice = invoice.fiscal_position_id\n invoice._onchange_partner_id()\n invoice.fiscal_position_id = fp_invoice\n # To keep the payment terms set on the SO\n invoice.payment_term_id = so_payment_term_id\n invoice.message_post_with_view('mail.message_origin_link',\n values={'self': invoice, 'origin': references[invoice]},\n subtype_id=self.env.ref('mail.mt_note').id)\n return [inv.id for inv in invoices.values()]", "def action_create_invoices(self, data):\n invoice_obj = self.env['account.invoice']\n values = {}\n for val in data:\n values.setdefault(val['invoice_type'], {\n 'order': val.get('sale', val.get('purchase')),\n 'values': []\n })\n values[val['invoice_type']]['values'].append((0, 0, val['values']))\n\n for inv_type, inv_data in values.items():\n invoice = invoice_obj.new(self._prepare_invoice(inv_type))\n invoice._onchange_partner_id()\n inv = invoice._convert_to_write({\n name: invoice[name] for name in invoice._cache\n })\n for _, _, line in inv_data['values']:\n line['account_id'] = inv['account_id']\n inv['invoice_line_ids'] = inv_data['values']\n new_invoice = invoice_obj.sudo().create(inv)\n new_invoice.action_invoice_open()\n inv_data['order'].write({\n 'exchange_invoice_ids': [(4, new_invoice.id)]\n })", "def action_invoice_create(self, grouped=False, final=False):\n inv_obj = self.env['account.invoice']\n precision = self.env['decimal.precision'].precision_get('Product Unit of Measure')\n invoices = {}\n references = {}\n invoices_origin = {}\n invoices_name = {}\n for order in self:\n group_key = order.id if grouped else (order.partner_id.id, order.currency_id.id)\n for line in order.order_line.sorted(key=lambda l: l.qty_received - l.qty_invoiced < 0):\n if float_is_zero(line.qty_received - line.qty_invoiced, precision_digits=precision):\n continue\n if group_key not in invoices:\n inv_data = order._prepare_invoice()\n invoice = inv_obj.create(inv_data)\n references[invoice] = order\n invoices[group_key] = invoice\n invoices_origin[group_key] = [invoice.origin]\n invoices_name[group_key] = [invoice.name]\n elif group_key in invoices:\n if order.name not in invoices_origin[group_key]:\n invoices_origin[group_key].append(order.name)\n if order.partner_ref and order.partner_ref not in invoices_name[group_key]:\n invoices_name[group_key].append(order.partner_ref)\n\n if 
line.qty_received - line.qty_invoiced > 0:\n                        line.invoice_line_create(invoices[group_key].id, line.qty_received - line.qty_invoiced)\n                    elif line.qty_received - line.qty_invoiced < 0 and final:\n                        line.invoice_line_create(invoices[group_key].id, line.qty_received - line.qty_invoiced)\n\n                if references.get(invoices.get(group_key)):\n                    if order not in references[invoices[group_key]]:\n                        references[invoices[group_key]] |= order\n\n        for group_key in invoices:\n            invoices[group_key].write({'name': ', '.join(invoices_name[group_key]),\n                                       'origin': ', '.join(invoices_origin[group_key])})\n\n        if not invoices:\n            raise UserError(_('There is no invoiceable line.'))\n\n        for invoice in invoices.values():\n            if not invoice.invoice_line_ids:\n                raise UserError(_('There is no invoiceable line.'))\n            # If invoice is negative, do a refund invoice instead\n            if invoice.amount_total < 0:\n                invoice.type = 'in_refund'\n                for line in invoice.invoice_line_ids:\n                    line.quantity = -line.quantity\n            # Necessary to force computation of taxes. In account_invoice, they are triggered\n            # by onchanges, which are not triggered when doing a create.\n            invoice.compute_taxes()\n            invoice.message_post_with_view('mail.message_origin_link',\n                values={'self': invoice, 'origin': references[invoice]},\n                subtype_id=self.env.ref('mail.mt_note').id)\n        return [inv.id for inv in invoices.values()]", "def create_invoice(self, order): # noqa:max-complexity=18\n\n        if len(order['order_lines']) == 0:\n            raise RuntimeError(\n                \"Expected at least 1 order_line in order {}, got: {}\".format(\n                    order['order_id'],\n                    order['order_lines']\n                )\n            )\n\n        order_id = order['order_id']\n\n        refund = False\n        if order['state'] == 'REFUND':\n            refund = True\n            self.stdout.write(self.style.WARNING(\"Refunded order: {}\".format(order_id)))\n        elif order['state'] == 'PAID':\n            pass\n        else:\n            self.stdout.write(self.style.WARNING(\"Not processing unknown order state {} for: {}\".format(order['state'], order_id)))\n            return\n\n        if self.only_known and order_id not in billy.TICKETBUTLER_IGNORE_LIST:\n            self.stdout.write(self.style.WARNING(\"Only processing known invoices, skipping {}\".format(order_id)))\n            return\n\n        # Object containing all created tickets, to have an invoice relation\n        # appended later\n        ticketbutler_tickets = []\n\n        for ticket in order['tickets']:\n\n            sprints = list(filter(\n                lambda q: q['question'] == 148,\n                ticket['answers']\n            ))[0]\n\n            if any(filter(lambda c: c['choice_heading'].lower() == 'no', sprints['answered_choices'])):\n                sprints = models.TicketbutlerTicket.SPRINTS_NO\n            elif any(filter(lambda c: c['choice_heading'].lower() == 'maybe', sprints['answered_choices'])):\n                sprints = models.TicketbutlerTicket.SPRINTS_MAYBE\n            elif any(filter(lambda c: c['choice_heading'].lower() == 'yes', sprints['answered_choices'])):\n                sprints = models.TicketbutlerTicket.SPRINTS_YES\n\n            ticketbutler_ticket = models.TicketbutlerTicket.get_or_create(\n                ticket['email'],\n                ticket['full_name'],\n                order_id,\n                sprints,\n                ticket['ticket_type_name'],\n            )\n            if refund:\n                self.stdout.write(self.style.WARNING(\"This ticket was marked refunded: {}\".format(order_id)))\n                ticketbutler_ticket.refunded = True\n                ticketbutler_ticket.save()\n            else:\n                ticketbutler_ticket.refunded = False\n                ticketbutler_ticket.save()\n\n            ticketbutler_tickets.append(ticketbutler_ticket)\n\n        if refund:\n            self.stdout.write(self.style.WARNING(\"Skipping refunded order: {}\".format(order_id)))\n            return\n\n        # If an email is changed on a TicketButler ticket and an old user exists without any other tickets,\n        # then disable this user's 
account and delete the ticket from the system\n all_order_tickets = models.TicketbutlerTicket.objects.filter(ticketbutler_orderid=order_id)\n\n for ticket in order['tickets']:\n\n for verify_ticket in all_order_tickets:\n # Check if the ticket is active in the current order, if it is\n # then skip it.\n if any(active.id == verify_ticket.id for active in ticketbutler_tickets):\n continue\n # Yeah, it's not active anymore, so delete it and potentially\n # disable the user account\n inactive_ticket = verify_ticket\n self.stdout.write(self.style.WARNING(\"Going to remove ticket for {}, order_id: {}\".format(inactive_ticket.user.email, order_id)))\n if inactive_ticket.user.tickets.all().exclude(id=inactive_ticket.id).exists():\n # Just remove the ticket\n self.stdout.write(self.style.WARNING(\"Found another ticket for user {} and deleted the inactive ticket in question but not the user\".format(inactive_ticket.user.email)))\n if inactive_ticket.pk:\n inactive_ticket.delete()\n continue\n else:\n # Remove the user account too if there are no submissions and it's not a superuser\n if not inactive_ticket.user.is_superuser and not inactive_ticket.user.submissions.all().exists():\n if inactive_ticket.user.is_active:\n self.stdout.write(self.style.WARNING(\"Also disabling user account for: {}\".format(inactive_ticket.user.email)))\n inactive_ticket.user.is_active = False\n inactive_ticket.user.save()\n else:\n self.stdout.write(self.style.WARNING(\"User was already inactive: {}\".format(inactive_ticket.user.email)))\n # In case the user had several tickets, and one of them was already deleted\n if inactive_ticket.pk:\n inactive_ticket.delete()\n\n if 'discount' in order:\n if order['discount']['amount'] == 100:\n\n for ticket in ticketbutler_tickets:\n ticket.free_ticket = True\n ticket.save()\n\n self.stdout.write(self.style.SUCCESS(\"Skipping invoice for free ticket for order id: {}\".format(order_id)))\n return\n else:\n self.stdout.write(self.style.ERROR(\"!!! 
Order id {} will have an invoice generated with missing information, Ticketbutler said the discount was: {}\".format(order_id, order['discount']['amount'])))\n\n for ticketbutler_order_line_no, order_line in enumerate(order['order_lines']):\n\n self.process_order_line(order, order_line, ticketbutler_tickets, ticketbutler_order_line_no=ticketbutler_order_line_no)", "def _prepare_invoice(self):\n # get current logged in user's timezone\n local = pytz.timezone(self.env['res.users'].browse(self._uid).tz) or pytz.utc\n\n self.ensure_one()\n journal_id = self.env['account.journal'].search([('type', '=', 'purchase')], limit=1).id\n if not journal_id:\n raise UserError(_('Please define an accounting purchase journal for this company.'))\n invoice_vals = {\n 'name': self.partner_ref or '',\n 'origin': self.name,\n 'type': 'in_invoice',\n 'account_id': self.partner_id.property_account_payable_id.id,\n 'partner_id': self.partner_id.id,\n 'journal_id': journal_id,\n 'currency_id': self.currency_id.id,\n 'comment': self.notes,\n 'payment_term_id': self.payment_term_id.id,\n 'fiscal_position_id': self.fiscal_position_id.id or self.partner_id.property_account_position_id.id,\n 'company_id': self.company_id.id,\n 'purchase_id': self.id,\n 'date_invoice':pytz.utc.localize(datetime.datetime.now()).astimezone(local).strftime('%Y-%m-%d'),\n }\n return invoice_vals", "def create_rental(self, student_id:int, rental_instrument:int, start_date:date, months_to_rent:int):\n try:\n s = start_date\n start_date = self.date_to_strf(s)\n # end_date = \"{}-{:02d}-{:02d}\".format(s.year, s.month + months_to_rent, s.day)\n self.cursor.execute(\"\"\" \n INSERT INTO rental (start_date, end_date, student_id, ri_id)\n VALUES (%s, %s::date + INTERVAL '%s month', %s , %s)\n \"\"\", [start_date, start_date, months_to_rent, student_id, rental_instrument])\n self.db.commit()\n except Exception as e:\n self.db.rollback()\n raise RuntimeError(\"No student found to be able to complete rental.\")", "def pl_create_order(self):\n\tprint()\n\tprint('Pl - Create Order')\n\n\n\tpartner = self.env['res.partner'].search([\n\t\t\t\t\t\t\t\t\t\t\t\t\t('name', '=', self.patient.name),\n\t\t\t\t\t\t\t\t\t\t\t\t],\n\t\t\t\t\t\t\t\t\t\t\t\t#order='appointment_date desc',\n\t\t\t\t\t\t\t\t\t\t\t\tlimit=1,)\n\n\n\t# Create Order\n\torder = self.env['sale.order'].create({\n\t\t\t\t\t\t\t\t\t\t\t\t\t'state':'draft',\n\t\t\t\t\t\t\t\t\t\t\t\t\t'x_doctor': self.physician.id,\n\n\t\t\t\t\t\t\t\t\t\t\t\t\t#'partner_id': self.partner_id.id,\n\t\t\t\t\t\t\t\t\t\t\t\t\t'partner_id': partner.id,\n\t\t\t\t\t\t\t\t\t\t\t\t\t#'x_ruc': self.partner_id.x_ruc,\n\t\t\t\t\t\t\t\t\t\t\t\t\t#'x_dni': self.partner_id.x_dni,\n\n\t\t\t\t\t\t\t\t\t\t\t\t\t'patient': self.patient.id,\n\t\t\t\t\t\t\t\t\t\t\t\t\t'x_id_doc': self.patient.x_id_doc,\n\t\t\t\t\t\t\t\t\t\t\t\t\t'x_id_doc_type': self.patient.x_id_doc_type,\n\t\t\t\t\t\t\t\t\t\t\t\t\t'x_family': 'procedure',\n\n\t\t\t\t\t\t\t\t\t\t\t\t\t'treatment': self.id,\n\t\t\t\t\t\t\t\t\t\t\t\t})\n\t#print(order)\n\n\n\n\t# Create Order Lines\n\tfor cart_line in self.shopping_cart_ids:\n\n\t\tproduct = cart_line.product\n\n\t\t#print(product)\n\t\t#print(product.name)\n\n\t\t# Create Order Line\n\t\tol = order.order_line.create({\n\t\t\t\t\t\t\t\t\t\t'name': \t\tproduct.name,\n\t\t\t\t\t\t\t\t\t\t'product_id': \tproduct.id,\n\t\t\t\t\t\t\t\t\t\t'price_unit': \tcart_line.price,\n\t\t\t\t\t\t\t\t\t\t'product_uom_qty': cart_line.qty,\n\t\t\t\t\t\t\t\t\t\t'order_id': \torder.id,\n\t\t\t\t\t\t\t\t\t})\n\treturn order\n\n\t# 
pl_create_order", "def invoice_create_onaccept(form):\n\n # Get record ID\n form_vars = form.vars\n if \"id\" in form_vars:\n record_id = form_vars.id\n elif hasattr(form, \"record_id\"):\n record_id = form.record_id\n else:\n return\n\n # Look up the billing ID\n table = current.s3db.fin_voucher_invoice\n query = (table.id == record_id)\n invoice = current.db(query).select(table.billing_id,\n limitby = (0, 1),\n ).first()\n\n if invoice:\n # Assign the invoice\n from .helpers import assign_pending_invoices\n assign_pending_invoices(invoice.billing_id,\n invoice_id = record_id,\n )", "def _prepare_invoice(self):\n self.ensure_one()\n journal_id = self.env['account.invoice'].default_get(['journal_id'])['journal_id']\n if not journal_id:\n raise UserError(_('Please define an accounting sales journal for this company.'))\n invoice_vals = {\n 'name': self.client_order_ref or '',\n 'origin': self.name,\n 'type': 'out_invoice',\n 'account_id': self.partner_invoice_id.property_account_receivable_id.id,\n 'partner_id': self.partner_invoice_id.id,\n 'partner_shipping_id': self.partner_shipping_id.id,\n 'journal_id': journal_id,\n 'currency_id': self.pricelist_id.currency_id.id,\n 'comment': self.note,\n 'payment_term_id': self.payment_term_id.id,\n 'fiscal_position_id': self.fiscal_position_id.id or self.partner_invoice_id.property_account_position_id.id,\n 'company_id': self.company_id.id,\n 'user_id': self.user_id and self.user_id.id,\n 'team_id': self.team_id.id,\n 'x_studio_field_rgEdd': self.x_studio_field_icWOZ.id,\n 'x_studio_car_type_1': self.vehicle.id,\n 'x_studio_job_card_1': self.x_studio_agency_job_card,\n 'x_studio_car_type_name': self.vehicle.model_id.name,\n 'x_studio_plate_num': self.vehicle.license_plate,\n 'x_studio_claim_num': self.claim_no,\n\n 'x_studio_is_insured':self.is_insured,\n 'x_studio_service_provider': self.service_advisor.id,\n 'date_invoice': fields.Date.today(),\n 'transaction_ids': [(6, 0, self.transaction_ids.ids)],\n }\n return invoice_vals\n\n # 'x_studio_field_rgEdd':order.x_studio_field_icWOZ.id,", "def _create_nsf_invoice(cls, cfs_account: CfsAccountModel, rs_number: str,\n payment_account: PaymentAccountModel) -> InvoiceModel:\n fee_schedule: FeeScheduleModel = FeeScheduleModel.find_by_filing_type_and_corp_type(corp_type_code='BCR',\n filing_type_code='NSF')\n invoice = InvoiceModel(\n bcol_account=payment_account.bcol_account,\n payment_account_id=payment_account.id,\n cfs_account_id=cfs_account.id,\n invoice_status_code=InvoiceStatus.CREATED.value,\n total=fee_schedule.fee.amount,\n service_fees=0,\n paid=0,\n payment_method_code=PaymentMethod.INTERNAL.value,\n corp_type_code='BCR',\n created_on=datetime.now(),\n created_by='SYSTEM',\n routing_slip=rs_number\n )\n invoice = invoice.save()\n distribution: DistributionCodeModel = DistributionCodeModel.find_by_active_for_fee_schedule(\n fee_schedule.fee_schedule_id)\n\n line_item = PaymentLineItemModel(\n invoice_id=invoice.id,\n total=invoice.total,\n fee_schedule_id=fee_schedule.fee_schedule_id,\n description=fee_schedule.filing_type.description,\n filing_fees=invoice.total,\n gst=0,\n priority_fees=0,\n pst=0,\n future_effective_fees=0,\n line_item_status_code=LineItemStatus.ACTIVE.value,\n service_fees=0,\n fee_distribution_id=distribution.distribution_code_id)\n line_item.save()\n\n invoice_response = CFSService.create_account_invoice(transaction_number=invoice.id,\n line_items=invoice.payment_line_items,\n cfs_account=cfs_account)\n\n invoice_number = invoice_response.get('invoice_number', None)\n 
current_app.logger.info(f'invoice_number {invoice_number} created in CFS for NSF.')\n\n InvoiceReferenceModel(\n invoice_id=invoice.id,\n invoice_number=invoice_number,\n reference_number=invoice_response.get('pbc_ref_number', None),\n status_code=InvoiceReferenceStatus.ACTIVE.value\n ).save()\n\n return invoice", "def create_proforma_invoice(sender, instance, created, **kwargs):\n if created:\n Invoice.create(instance, Invoice.INVOICE_TYPES['PROFORMA'])", "def prepare_invoice(self):\n journal_id = self.env['account.invoice'].default_get(['journal_id'])['journal_id']\n if not journal_id:\n raise UserError(_('Please define sales journal for this company: \"%s\" (id:%d).') % (self.company_id.name, self.company_id.id))\n invoice_vals = {\n 'order_id': self.id,\n 'name': self.order_no,\n 'origin': self.order_no,\n 'type': 'out_invoice',\n 'reference': self.patient_id.name + ':' + self.name,\n 'account_id': self.patient_id.partner_id.property_account_receivable_id.id,\n 'partner_id': self.patient_id.partner_id.id,\n 'journal_id': journal_id,\n 'comment': self.note,\n 'doctor_id': self.doctor_id.id,\n 'payment_term': False,\n 'user_id': False,\n }\n return invoice_vals", "def create_invoices(self, cr, uid, ids, context=None):\n invoice_list = []\n po_obj = self.pool.get('purchase.order')\n inv_line_obj = self.pool.get('account.invoice.line')\n inv_obj = self.pool.get('account.invoice')\n addr_obj = self.pool.get('res.partner')\n journal_obj = self.pool.get('account.journal')\n if context is None:\n context = {}\n\n for purchase_adv_obj in self.browse(cr, uid, ids, context=context):\n for purchase_order in po_obj.browse(cr, uid, context.get('active_ids', []), context=context):\n inv_line_ids = []\n invoice_ids = []\n val = inv_line_obj.product_id_change(cr, uid, [], purchase_adv_obj.product_id.id,\n uom_id=False, partner_id=purchase_order.partner_id.id, fposition_id=purchase_order.fiscal_position.id)\n line_id = inv_line_obj.create(cr, uid, {\n 'name': val['value']['name'],\n 'account_id': val['value']['account_id'],\n 'price_unit': purchase_adv_obj.amount,\n 'quantity': purchase_adv_obj.qtty,\n 'discount': False,\n 'uos_id': val['value']['uos_id'],\n 'product_id': purchase_adv_obj.product_id.id,\n 'invoice_line_tax_id': [(6, 0, val['value']['invoice_line_tax_id'])],\n })\n inv_line_ids.append(line_id)\n addr = addr_obj.address_get(cr, uid, [purchase_order.partner_id.id], ['invoice'])\n journal_ids = journal_obj.search(cr, uid, [('type', '=', 'purchase')])\n context.update({'type':'in_invoice','journal_type':'purchase'})\n inv_vals = {\n 'name': purchase_order.partner_ref or purchase_order.name,\n 'origin': purchase_order.name,\n 'type': 'in_invoice',\n 'reference': False,\n 'account_id': purchase_order.partner_id.property_account_payable.id,\n 'journal_id':journal_ids and journal_ids[0] or False,\n 'partner_id': purchase_order.partner_id.id,\n 'address_invoice_id': addr['invoice'],\n 'invoice_line': [(6, 0, inv_line_ids)],\n 'currency_id': purchase_order.pricelist_id.currency_id.id,\n 'comment': '',\n 'payment_term': purchase_order.payment_term_id and purchase_order.payment_term_id.id or False,\n 'fiscal_position': purchase_order.fiscal_position.id or purchase_order.partner_id.property_account_position.id,\n 'prepaid': True\n }\n\n inv_id = inv_obj.create(cr, uid, inv_vals, context=context)\n inv_obj.button_reset_taxes(cr, uid, [inv_id], context=context)\n for invoice in purchase_order.invoice_ids:\n invoice_ids.append(invoice.id)\n invoice_ids.append(inv_id)\n po_obj.write(cr, uid, 
purchase_order.id, {'invoice_ids': [(6, 0, invoice_ids)]})\n invoice_list.append(inv_id)\n\n if purchase_order.invoice_method in ('picking','order'):\n self.pool.get('purchase.order.line').create(cr, uid, {\n 'order_id': purchase_order.id,\n 'name': val['value']['name'],\n 'date_planned':purchase_order.date_order,\n 'price_unit': -purchase_adv_obj.amount,\n 'product_uom_qty': purchase_adv_obj.qtty,\n 'product_uos': val['value']['uos_id'],\n 'product_uom': val['value']['uos_id'],\n 'product_id': purchase_adv_obj.product_id.id,\n 'adavance_product':True,\n 'discount': False,\n 'taxes_id': [(6, 0, val['value']['invoice_line_tax_id'])],\n }, context=context)\n\n\n context.update({'invoice_id':invoice_list})\n return {\n 'name': 'Open Invoice',\n 'view_type': 'form',\n 'view_mode': 'form',\n 'res_model': 'purchase.open.invoice',\n 'type': 'ir.actions.act_window',\n 'target': 'new',\n 'context': context\n }", "def create_order(self, serializer):\n data = serializer.validated_data\n service: Service = data['service']\n customer: Customer = Customer.objects.get_or_create(\n email=data['email'])[0]\n invoice: Invoice = Invoice(\n charged_amount=service.price.amount,\n currency=service.price.currency,\n timestamp=now(),\n customer=customer,\n service=service\n )\n invoice.save()\n serializer.validated_data['invoice_id'] = invoice.id\n serializer.save()\n\n self.send_order_email(invoice, serializer.instance)", "def do_create(service,summary,description,startday,\\\n starttime,endtime,username,email):\n event = {\n 'summary': 'Code Clinic: {}'.format(summary),\n 'description': '{}.'.format(description),\n 'start': {\n 'dateTime': '{}T{}:00'.format(startday, starttime),\n 'timeZone': 'GMT+02',\n },\n 'end': {\n 'dateTime': '{}T{}:00'.format(startday,endtime),\n 'timeZone': 'GMT+02',\n },\n 'recurrence': [\n 'RRULE:FREQ=DAILY;COUNT=1'\n ],\n 'attendees': [\n {\n 'displayName': username,\n 'email': email,\n 'optional': True,\n 'comment': 'Creator',\n 'responseStatus': 'accepted',\n },\n ],\n 'anyoneCanAddSelf': True,\n\n 'reminders': {\n 'useDefault': False,\n 'overrides': [\n {'method': 'email', 'minutes': 24 * 60},\n {'method': 'popup', 'minutes': 10},\n ],\n },\n }\n\n event = service.events().insert(calendarId='primary', body=event,\\\n sendUpdates='all').execute()\n\n return event", "def test_invoice_create(self):\n # first we create a customer\n id = self._create_model(\"customer\", self.customer_data, [\"name\", \"email\", \"phone\"])\n if id:\n # then we can create the invoice\n data = self.invoice_data\n data[\"customer_id\"] = id\n self._create_model(\"invoice\", data, [])\n self.assertIsNotNone(id)", "def _prepare_invoice(self):\n self.ensure_one()\n # journal_id = self.env['account.invoice'].with_context(force_company=self.env.user.company_id.id).default_get(['journal_id'])['journal_id']\n journal_id = self.company_id.journal_id.id\n if not journal_id:\n raise UserError(_('Please define an accounting sales journal for this company.'))\n invoice_vals = {\n 'name': self.client_order_ref or '',\n 'origin': self.name,\n 'type': 'out_invoice',\n 'account_id': self.partner_invoice_id.property_account_receivable_id.id,\n 'partner_id': self.partner_invoice_id.id,\n 'partner_shipping_id': self.partner_shipping_id.id,\n 'journal_id': journal_id,\n 'currency_id': self.pricelist_id.currency_id.id,\n 'comment': self.note,\n 'payment_term_id': self.payment_term_id.id,\n 'fiscal_position_id': self.fiscal_position_id.id or self.partner_invoice_id.property_account_position_id.id,\n 'company_id': 
self.company_id.id,\n 'user_id': self.user_id and self.user_id.id,\n 'team_id': self.team_id.id\n }\n return invoice_vals", "def save(request):\n inv_num = request.POST[\"invoice_number\"]\n initial_data, data = process_request(request)\n tax_data = json.loads(request.POST[\"tax_data\"].replace(\"'\", \"\\\"\"))\n grand_total = request.POST[\"grand_total\"]\n\n Invoice.objects.create(number=inv_num,\n invoice_date=datetime.datetime.strptime(initial_data.get(\"invoice_date\"), \"%d %B, %Y\"),\n reference_number=initial_data.get(\"reference_number\"),\n reference_date=datetime.datetime.strptime(initial_data.get(\"reference_date\"), \"%d %B, %Y\"),\n addressed_to=initial_data.get(\"addressed_to\"),\n party_gst=initial_data.get(\"party_gst\"),\n created_at=datetime.datetime.now(),\n modified_at=datetime.datetime.now(),\n notes=tax_data.get(\"additional_notes\"),\n items=data,\n s_gst=tax_data.get(\"s_gst\"),\n c_gst=tax_data.get(\"c_gst\"),\n other_charges=tax_data.get(\"other_charges\"),\n total=grand_total\n ).save()\n\n return redirect(\"/invoice/print/\" + inv_num)", "def create_order(self):\n\tprint()\n\tprint('OH - pl_create_order')\n\n\t# Search Partner\n\tprint()\n\tprint('Search partner')\n\tpartner = self.env['res.partner'].search([\n\t\t\t\t\t\t\t\t\t\t\t\t\t('name', '=', self.patient.name),\n\t\t\t\t\t\t\t\t\t\t\t\t],\n\t\t\t\t\t\t\t\t\t\t\t\t#order='appointment_date desc',\n\t\t\t\t\t\t\t\t\t\t\t\tlimit=1,)\n\n\t# Search Pl\n\tprint()\n\tprint('Search pricelist')\n\tpricelist = self.env['product.pricelist'].search([\n\t\t\t\t\t\t\t\t\t\t\t#('active', 'in', [True]),\n\t\t\t\t\t\t\t\t\t\t\t],\n\t\t\t\t\t\t\t\t\t\t\t#order='x_serial_nr asc',\n\t\t\t\t\t\t\t\t\t\t\tlimit=1,\n\t\t\t\t\t\t\t\t\t\t)\n\tprint(pricelist)\n\n\t# Create Order\n\torder = self.env['sale.order'].create({\n\t\t\t\t\t\t\t\t\t\t\t\t\t'state':'draft',\n\t\t\t\t\t\t\t\t\t\t\t\t\t'x_doctor': self.physician.id,\n\n\t\t\t\t\t\t\t\t\t\t\t\t\t#'partner_id': self.partner_id.id,\n\t\t\t\t\t\t\t\t\t\t\t\t\t'partner_id': partner.id,\n\t\t\t\t\t\t\t\t\t\t\t\t\t#'x_ruc': self.partner_id.x_ruc,\n\t\t\t\t\t\t\t\t\t\t\t\t\t#'x_dni': self.partner_id.x_dni,\n\n\t\t\t\t\t\t\t\t\t\t\t\t\t'patient': self.patient.id,\n\t\t\t\t\t\t\t\t\t\t\t\t\t'x_id_doc': self.patient.x_id_doc,\n\t\t\t\t\t\t\t\t\t\t\t\t\t'x_id_doc_type': self.patient.x_id_doc_type,\n\t\t\t\t\t\t\t\t\t\t\t\t\t'x_family': 'procedure',\n\n\t\t\t\t\t\t\t\t\t\t\t\t\t'treatment': self.id,\n\n\t\t\t\t\t\t\t\t\t\t\t\t\t'pricelist_id': pricelist.id,\n\t\t\t\t\t\t\t\t\t\t\t\t})\n\t#print(order)\n\n\n\n\t# Create Order Lines\n\tfor cart_line in self.shopping_cart_ids:\n\n\t\tproduct = cart_line.product\n\n\t\t#print(product)\n\t\t#print(product.name)\n\n\t\t# Create Order Line\n\t\tol = order.order_line.create({\n\t\t\t\t\t\t\t\t\t\t'name': \t\tproduct.name,\n\t\t\t\t\t\t\t\t\t\t'product_id': \tproduct.id,\n\t\t\t\t\t\t\t\t\t\t'price_unit': \tcart_line.price,\n\t\t\t\t\t\t\t\t\t\t'product_uom_qty': cart_line.qty,\n\t\t\t\t\t\t\t\t\t\t'order_id': \torder.id,\n\t\t\t\t\t\t\t\t\t})\n\treturn order", "def create_invoice(cls, payment_request: Tuple[Dict[str, Any]], authorization: Tuple[Dict[str, Any]]) -> Dict:\n # pylint: disable=too-many-locals, too-many-statements\n business_info = payment_request.get('businessInfo')\n filing_info = payment_request.get('filingInfo')\n account_info = payment_request.get('accountInfo', None)\n corp_type = business_info.get('corpType', None)\n business_identifier = business_info.get('businessIdentifier')\n\n payment_account = 
cls._find_payment_account(authorization)\n        payment_method = _get_payment_method(payment_request, payment_account)\n        current_app.logger.info(f'Creating Payment Request : '\n                                f'{payment_method}, {corp_type}, {business_identifier}, '\n                                f'{payment_account.auth_account_id}')\n\n        bcol_account = cls._get_bcol_account(account_info, payment_account)\n\n        # Calculate the fees\n        fees = _calculate_fees(corp_type, filing_info)\n\n        # Create payment system instance from factory\n        pay_service: PaymentSystemService = PaymentSystemFactory.create(\n            payment_method=payment_method,\n            corp_type=corp_type,\n            fees=sum(fee.total for fee in fees),\n            account_info=account_info,\n            payment_account=payment_account\n        )\n        current_app.logger.info(f'Created Pay System Instance : {pay_service}')\n\n        pay_system_invoice: Dict[str, any] = None\n        invoice: Invoice = None\n\n        try:\n            invoice = Invoice()\n            invoice.bcol_account = bcol_account\n            invoice.payment_account_id = payment_account.id\n            invoice.cfs_account_id = payment_account.cfs_account_id\n            invoice.invoice_status_code = pay_service.get_default_invoice_status()\n            invoice.service_fees = sum(fee.service_fees for fee in fees) if fees else 0\n            invoice.total = sum(fee.total for fee in fees) if fees else 0\n            invoice.paid = 0\n            invoice.refund = 0\n            invoice.routing_slip = get_str_by_path(account_info, 'routingSlip')\n            invoice.filing_id = filing_info.get('filingIdentifier', None)\n            invoice.dat_number = get_str_by_path(account_info, 'datNumber')\n            invoice.folio_number = filing_info.get('folioNumber', None)\n            invoice.business_identifier = business_identifier\n            invoice.payment_method_code = pay_service.get_payment_method_code()\n            invoice.corp_type_code = corp_type\n            details = payment_request.get('details')\n            if not details or details == 'null':\n                details = []\n            invoice.details = details\n            invoice = invoice.flush()\n\n            line_items = []\n            for fee in fees:\n                line_items.append(PaymentLineItem.create(invoice.id, fee))\n\n            current_app.logger.info(f'Handing off to payment system to create invoice for {invoice.id}')\n            invoice_reference = pay_service.create_invoice(payment_account, line_items, invoice,\n                                                           corp_type_code=invoice.corp_type_code)\n\n            invoice.commit()\n\n            pay_service.complete_post_invoice(invoice, invoice_reference)\n\n            invoice = Invoice.find_by_id(invoice.id, skip_auth_check=True)\n\n        except Exception as e: # NOQA pylint: disable=broad-except\n            current_app.logger.error('Rolling back as error occurred!')\n            current_app.logger.error(e)\n            if invoice:\n                invoice.rollback()\n            if pay_system_invoice:\n                pay_service.cancel_invoice(\n                    payment_account,\n                    pay_system_invoice.get('invoice_number'),\n                )\n            raise\n\n        current_app.logger.debug('>Finished creating payment request')\n\n        return invoice.asdict(include_dynamic_fields=True)", "def test_invoice_item_create(self):\n        # first we create a customer\n        id = self._create_model(\"customer\", self.customer_data, [\"name\", \"email\", \"phone\"])\n        if id:\n            # then we create an invoice\n            data = self.invoice_data\n            data[\"customer_id\"] = id\n            id_inv = self._create_model(\"invoice\", data, [])\n            if id_inv:\n                # then we create a product\n                id_prod = self._create_model(\"product\", self.product_data, [\"name\", \"description\", \"image_link\", \"price\"])\n                if id_prod:\n                    # then we can create the invoice's item\n                    data = self.invoice_item_data\n                    data[\"invoice_id\"] = id_inv\n                    data[\"product_id\"] = id_prod\n                    self._create_model(\"invoiceitem\", data, [\"quantity\", \"quote_price\"])\n                    self.assertIsNotNone(id_prod)\n                    self.assertIsNotNone(id_inv)\n 
self.assertIsNotNone(id)", "def invoice(self, id):\r\n return Invoice(self, id)", "def create_order_amended_invoice(sender, instance, using, **kwargs):\n\n sender_name = sender._meta.model.__name__\n\n if sender_name == \"WillOrder\":\n order = instance\n elif sender_name == \"Allocation\":\n order = instance.asset_store.order\n else:\n order = instance.order\n\n if Invoice.objects.filter(\n order=order, been_paid=True, parent_invoice=None\n ).exists():\n amended_invoice_required = False\n latest_paid_invoice = order.invoice.latest_paid()\n print(\"latest_paid_invoice\", latest_paid_invoice)\n if latest_paid_invoice:\n order_details = InvoiceService(order).limit_details\n\n for order_detail, order_numbers in order_details.items():\n try:\n willorder_limit = OrderLimit.objects.get(\n invoice=latest_paid_invoice, detail=order_detail\n )\n if order_numbers > willorder_limit.limit:\n amended_invoice_required = True\n except OrderLimit.DoesNotExist:\n amended_invoice_required = True\n\n parent_invoice = Invoice.objects.get(order=order, parent_invoice=None)\n\n if amended_invoice_required:\n if Invoice.objects.filter(\n order=order, been_paid=False, parent_invoice=parent_invoice\n ).exists():\n print(\"UPDATE AMENDED INVOICE\")\n order.invoice.latest().update_invoice()\n else:\n Invoice.objects.create(\n order=order, parent_invoice=parent_invoice)\n else:\n print(\"DELETE AMENDED INVOICE\")\n if Invoice.objects.filter(\n order=order, been_paid=False, parent_invoice=parent_invoice\n ).exists():\n Invoice.objects.get(\n order=order, parent_invoice=parent_invoice, been_paid=False\n ).delete()", "def create_rent_schedule_landlord(self):\n rent_obj = self.env['tenancy.rent.schedule']\n for tenancy_rec in self:\n amount = tenancy_rec.landlord_rent\n if tenancy_rec.rent_type_id.renttype == 'Weekly':\n d1 = tenancy_rec.date_start\n d2 = tenancy_rec.date\n interval = int(tenancy_rec.rent_type_id.name)\n if d2 < d1:\n raise Warning(\n _('End date must be greater than start date.'))\n wek_diff = (d2 - d1)\n wek_tot1 = (wek_diff.days) / (interval * 7)\n wek_tot = (wek_diff.days) % (interval * 7)\n if wek_diff.days == 0:\n wek_tot = 1\n if wek_tot1 > 0:\n for wek_rec in range(wek_tot1):\n rent_obj.create(\n {\n 'start_date': d1,\n 'amount': amount * interval or 0.0,\n 'property_id': tenancy_rec.property_id and\n tenancy_rec.property_id.id or False,\n 'tenancy_id': tenancy_rec.id,\n 'currency_id': tenancy_rec.currency_id.id or\n False,\n 'rel_tenant_id': tenancy_rec.tenant_id.id\n })\n d1 = d1 + relativedelta(days=(7 * interval))\n if wek_tot > 0:\n one_day_rent = 0.0\n if amount:\n one_day_rent = (amount) / (7 * interval)\n rent_obj.create({\n 'start_date': d1.strftime(\n DEFAULT_SERVER_DATE_FORMAT),\n 'amount': (one_day_rent * (wek_tot)) or 0.0,\n 'property_id': tenancy_rec.property_id and\n tenancy_rec.property_id.id or False,\n 'tenancy_id': tenancy_rec.id,\n 'currency_id': tenancy_rec.currency_id.id or False,\n 'rel_tenant_id': tenancy_rec.tenant_id.id\n })\n elif tenancy_rec.rent_type_id.renttype != 'Weekly':\n if tenancy_rec.rent_type_id.renttype == 'Monthly':\n interval = int(tenancy_rec.rent_type_id.name)\n if tenancy_rec.rent_type_id.renttype == 'Yearly':\n interval = int(tenancy_rec.rent_type_id.name) * 12\n d1 = tenancy_rec.date_start\n d2 = tenancy_rec.date\n diff = abs((d1.year - d2.year) * 12 + (d1.month - d2.month))\n tot_rec = diff / interval\n tot_rec2 = diff % interval\n if abs(d1.month - d2.month) >= 0 and d1.day < d2.day:\n tot_rec2 += 1\n if diff == 0:\n tot_rec2 = 1\n if tot_rec > 0:\n 
tot_rec = int(tot_rec)\n for rec in range(tot_rec):\n rent_obj.create({\n 'start_date': d1.strftime(\n DEFAULT_SERVER_DATE_FORMAT),\n 'amount': amount * interval or 0.0,\n 'property_id': tenancy_rec.property_id and\n tenancy_rec.property_id.id or False,\n 'tenancy_id': tenancy_rec.id,\n 'currency_id': tenancy_rec.currency_id.id or\n False,\n 'rel_tenant_id': tenancy_rec.tenant_id.id\n })\n d1 = d1 + relativedelta(months=interval)\n if tot_rec2 > 0:\n rent_obj.create({\n 'start_date': d1.strftime(DEFAULT_SERVER_DATE_FORMAT),\n 'amount': amount * tot_rec2 or 0.0,\n 'property_id': tenancy_rec.property_id and\n tenancy_rec.property_id.id or False,\n 'tenancy_id': tenancy_rec.id,\n 'currency_id': tenancy_rec.currency_id.id or False,\n 'rel_tenant_id': tenancy_rec.tenant_id.id\n })\n return self.write({'rent_entry_chck': True})", "def create_order(cls, invoice):\n order = cls(\n order_id=str(uuid.uuid4().int),\n invoice=invoice\n ).save()\n\n invoice_line_items = InvoiceLineItem.objects.filter(invoice=invoice, type=\"item\").all()\n\n for invoice_line_item in invoice_line_items:\n OrderLineItem.create_order_line_item(order=order, invoice_line_item=invoice_line_item)\n\n return order", "def generate_new_visit(self):\n if self.consecutive:\n customer_id = np.random.choice(\n self.customerIds, 1\n ) # choose a customer at random\n insured = self.Customers[self.Customers[\"customer_id\"] == customer_id[0]][\n \"insurance\"\n ].values[\n 0\n ] # does the customer have insurance?\n experiment_id = self.Customers[\n self.Customers[\"customer_id\"] == customer_id[0]\n ][\"experiment_id\"].values[\n 0\n ] # does the customer have insurance?\n\n event_list = (\n self.billing_choose_dates()\n ) # generate dates associated with this invoice\n cpt_code = random.sample(self.CPTCodes, 1)[0]\n date_of_service = str(event_list.values[0][0])\n created_on = str(event_list.values[1][0])\n date_of_eob = str(event_list.values[2][0])\n date_of_provider_adjustment = str(event_list.values[3][0])\n date_of_patient_payment = str(event_list.values[4][0])\n # generate a new invoice\n (invoice_id, charge_amount) = self.generate_new_invoice(\n created_on, date_of_service, customer_id, cpt_code\n )\n # generate subsequent EOB (i.e. 
copay, EOB adjustment, EOB payment)\n remaining_amount = self.generate_eob(\n date_of_service,\n date_of_eob,\n insured,\n invoice_id,\n cpt_code,\n charge_amount,\n )\n # generate provider adjustments\n remaining_amount = self.generate_provider_adjustment(\n date_of_provider_adjustment, invoice_id, cpt_code, remaining_amount\n )\n # generate a possible payment from the patient\n remaining_amount = self.generate_patient_payment(\n date_of_patient_payment,\n invoice_id,\n cpt_code,\n remaining_amount,\n experiment_id,\n )\n # record the remaining amounts in a separate table.\n self.record_remaining_amount(\n date_of_patient_payment, invoice_id, cpt_code, remaining_amount\n )\n return True\n else:\n print(\"Error generating new invoice- customerIds aren't consecutive\")", "def create_order_invoice(sender, instance, created, using, **kwargs):\n\n # Create invoice if it doesn't already exist\n if (\n created\n and not Invoice.objects.filter(\n order__order_number=instance.order_number\n ).exists()\n ):\n invoice = Invoice(order=instance)\n # Saving it in reverse to avoid having this signal called again\n invoice.save()\n\n for slug, cls in discount_rules.get_all_discount_rules():\n if cls.can_user_have_access(instance.user, invoice):\n cls.apply_discount(instance.user, invoice)", "def action_move_create(self, cr, uid, ids, context=None):\n ait_obj = self.pool.get('account.invoice.tax')\n cur_obj = self.pool.get('res.currency')\n period_obj = self.pool.get('account.period')\n payment_term_obj = self.pool.get('account.payment.term')\n journal_obj = self.pool.get('account.journal')\n move_obj = self.pool.get('account.move')\n if context is None:\n context = {}\n for inv in self.browse(cr, uid, ids, context=context):\n if not inv.journal_id:\n raise orm.except_orm(_('Error!'),\n _('Journal not defined for this invoice!'))\n if not inv.journal_id.iva_registry_id:\n raise orm.except_orm(_('Error!'),\n _('You must link %s with a VAT registry!') % (inv.journal_id.name))\n if not inv.journal_id.sequence_id:\n raise orm.except_orm(_('Error!'),\n _('Please define sequence on the journal related to this invoice.')) \n if not inv.invoice_line:\n raise orm.except_orm(_('No Invoice Lines!'),\n _('Please create some invoice lines.'))\n if inv.move_id:\n continue\n\n ctx = context.copy()\n ctx.update({'lang': inv.partner_id.lang})\n if not inv.date_invoice:\n self.write(cr, uid, [inv.id],\n {'date_invoice': fields.date.context_today(self,\n cr,\n uid,\n context=context)},\n context=ctx)\n company_currency = self.pool['res.company'].browse(cr, uid,\n inv.company_id.id).currency_id.id\n # create the analytical lines\n # one move line per invoice line\n # iml = self._get_analytic_lines(cr, uid, inv.id, context=ctx)\n iml = super(account_invoice_makeover, self)._get_analytic_lines(cr, uid, inv.id, context=ctx)\n # check if taxes are all computed\n compute_taxes = ait_obj.compute(cr, uid, inv.id, context=ctx)\n # self.check_tax_lines(cr, uid, inv, compute_taxes, ait_obj)\n super(account_invoice_makeover, self).check_tax_lines(cr, uid, inv, compute_taxes, ait_obj)\n\n # I disabled the check_total feature\n group_check_total_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'account', 'group_supplier_inv_check_total')[1]\n group_check_total = self.pool.get('res.groups').browse(cr, uid,\n group_check_total_id,\n context=context)\n if group_check_total and uid in [x.id for x in group_check_total.users]:\n if (inv.type in ('in_invoice', 'in_refund') and abs(inv.check_total - inv.amount_total) >= 
(inv.currency_id.rounding / 2.0)):\n raise orm.except_orm(_('Bad Total!'), _('Please verify the price of the invoice!\\nThe encoded total does not match the computed total.'))\n\n if inv.payment_term:\n total_fixed = total_percent = 0\n for line in inv.payment_term.line_ids:\n if line.value == 'fixed':\n total_fixed += line.value_amount\n if line.value == 'procent':\n total_percent += line.value_amount\n total_fixed = (total_fixed * 100) / (inv.amount_total or 1.0)\n if (total_fixed + total_percent) > 100:\n raise orm.except_orm(_('Error!'), _(\"Cannot create the invoice.\\nThe related payment term is probably misconfigured as it gives a computed amount greater than the total invoiced amount. In order to avoid rounding issues, the latest line of your payment term must be of type 'balance'.\"))\n\n # one move line per tax line\n iml += ait_obj.move_line_get(cr, uid, inv.id)\n\n# entry_type = ''\n if inv.type in ('in_invoice', 'in_refund'):\n ref = inv.reference\n# entry_type = 'journal_pur_voucher'\n# if inv.type == 'in_refund':\n# entry_type = 'cont_voucher'\n else:\n # ref = self._convert_ref(cr, uid, inv.number)\n ref = super(account_invoice_makeover, self)._convert_ref(cr, uid, inv.number)\n# entry_type = 'journal_sale_vou'\n# if inv.type == 'out_refund':\n# entry_type = 'cont_voucher'\n\n diff_currency_p = inv.currency_id.id <> company_currency\n # create one move line for the total and possibly adjust the other lines amount\n total = 0\n total_currency = 0\n # total, total_currency, iml = self.compute_invoice_totals(cr, uid, inv, company_currency, ref, iml, context=ctx)\n total, total_currency, iml = super(account_invoice_makeover, self).compute_invoice_totals(cr, uid, inv, company_currency, ref, iml, context=ctx)\n acc_id = inv.account_id.id\n\n name = inv['name'] or inv['supplier_invoice_number'] or '/'\n totlines = False\n if inv.payment_term:\n totlines = payment_term_obj.compute(cr,\n uid, inv.payment_term.id, total, inv.date_invoice or False, context=ctx)\n if totlines:\n res_amount_currency = total_currency\n i = 0\n ctx.update({'date': inv.date_invoice})\n for t_line in totlines:\n if inv.currency_id.id != company_currency:\n amount_currency = cur_obj.compute(cr, uid, company_currency, inv.currency_id.id, t_line[1], context=ctx)\n else:\n amount_currency = False\n\n # last line add the diff\n res_amount_currency -= amount_currency or 0\n i += 1\n if i == len(totlines):\n amount_currency += res_amount_currency\n\n iml.append({\n 'type': 'dest',\n 'name': name,\n 'price': t_line[1],\n 'account_id': acc_id,\n 'date_maturity': t_line[0],\n 'amount_currency': diff_currency_p \\\n and amount_currency or False,\n 'currency_id': diff_currency_p \\\n and inv.currency_id.id or False,\n 'ref': ref,\n 'payment_type': t_line[2]\n })\n else:\n iml.append({\n 'type': 'dest',\n 'name': name,\n 'price': total,\n 'account_id': acc_id,\n 'date_maturity': inv.date_due or False,\n 'amount_currency': diff_currency_p \\\n and total_currency or False,\n 'currency_id': diff_currency_p \\\n and inv.currency_id.id or False,\n 'ref': ref,\n 'payment_type': None\n })\n\n date = inv.date_invoice or time.strftime('%Y-%m-%d')\n\n part = self.pool.get(\"res.partner\")._find_accounting_partner(inv.partner_id)\n\n line = map(lambda x:(0, 0, self.line_get_convert(cr, uid, x, part.id, date, context=ctx)), iml)\n\n # line = self.group_lines(cr, uid, iml, line, inv)\n line = super(account_invoice_makeover, self).group_lines(cr, uid, iml, line, inv)\n\n journal_id = inv.journal_id.id\n journal = 
journal_obj.browse(cr, uid, journal_id, context=ctx)\n if journal.centralisation:\n raise orm.except_orm(_('User Error!'),\n _('You cannot create an invoice on a centralized journal. Uncheck the centralized counterpart box in the related journal from the configuration menu.'))\n\n line = self.finalize_invoice_move_lines(cr, uid, inv, line)\n\n move = {\n 'ref': inv.reference and inv.reference or inv.name,\n 'line_id': line,\n 'journal_id': journal_id,\n 'date': date,\n 'narration': inv.comment,\n 'company_id': inv.company_id.id,\n }\n period_id = inv.period_id and inv.period_id.id or False\n ctx.update(company_id=inv.company_id.id,\n account_period_prefer_normal=True)\n if not period_id:\n period_ids = period_obj.find(cr, uid, inv.registration_date, context=ctx)\n period_id = period_ids and period_ids[0] or False\n if period_id:\n move['period_id'] = period_id\n for i in line:\n i[2]['period_id'] = period_id\n\n ctx.update(invoice=inv)\n move_id = move_obj.create(cr, uid, move, context=ctx)\n new_move_name = move_obj.browse(cr, uid, move_id, context=ctx).name\n # make the invoice point to that move\n self.write(cr, uid, [inv.id], {'move_id': move_id, 'period_id':period_id, 'move_name':new_move_name}, context=ctx)\n # Pass invoice in context in method post: used if you want to get the same\n # account move reference when creating the same invoice after a cancelled one:\n move_obj.post(cr, uid, [move_id], context=ctx)\n # self._log_event(cr, uid, ids)\n super(account_invoice_makeover, self)._log_event(cr, uid, ids)\n return True", "def _prepare_invoice(self, cr, uid, order, lines, context=None):\n if context is None:\n context = {}\n journal_id = self.pool['account.invoice'].default_get(cr, uid, ['journal_id'], context=context)['journal_id']\n if not journal_id:\n raise osv.except_osv(_('Error!'),\n _('Please define sales journal for this company: \"%s\" (id:%d).') % (order.company_id.name, order.company_id.id))\n invoice_vals = {\n 'name': order.client_order_ref or '',\n 'origin': order.name,\n 'type': 'out_invoice',\n 'reference': order.client_order_ref or order.name,\n 'account_id': order.partner_invoice_id.property_account_receivable.id,\n 'partner_id': order.partner_invoice_id.id,\n 'journal_id': journal_id,\n 'invoice_line': [(6, 0, lines)],\n 'currency_id': order.pricelist_id.currency_id.id,\n 'comment': order.note,\n 'payment_term': order.payment_term and order.payment_term.id or False,\n 'fiscal_position': order.fiscal_position.id or order.partner_invoice_id.property_account_position.id,\n 'date_invoice': context.get('date_invoice', False),\n 'company_id': order.company_id.id,\n 'user_id': order.user_id and order.user_id.id or False,\n 'section_id' : order.section_id.id,\n 'test_1' :order.test\n }\n\n # Care for deprecated _inv_get() hook - FIXME: to be removed after 6.1\n invoice_vals.update(self._inv_get(cr, uid, order, context=context))\n return invoice_vals", "def create(request):\n if request.method == \"POST\":\n form = InitialInvoice(data=request.POST)\n if form.is_valid():\n data = form.cleaned_data\n return render(request,\n \"invoice/invoice_create.html\",\n {\n \"form\": ItemForm(),\n \"stage\": \"2\",\n \"initial_data\": data\n })\n\n return render(request,\n \"invoice/invoice_create.html\",\n {\n \"form\": InitialInvoice(),\n \"stage\": \"1\"\n })", "def _prepare_invoice(self, cr, uid, order, lines, context=None):\n if context is None:\n context = {}\n journal_ids = self.pool.get('account.journal').search(cr, uid,\n [('type', '=', 'sale'), ('company_id', '=', 
order.company_id.id)],\n limit=1)\n if not journal_ids:\n raise osv.except_osv(_('Error !'),\n _('There is no sales journal defined for this company: \"%s\" (id:%d)') % (order.company_id.name, order.company_id.id))\n\n invoice_vals = {\n 'name': order.client_order_ref or '',\n 'origin': order.name,\n 'type': 'out_invoice',\n 'reference': order.client_order_ref or order.name,\n 'account_id': order.partner_id.property_account_receivable.id,\n 'journal_id': order.partner_id.property_default_sale_invoice_journal.id,\n 'partner_id': order.partner_id.id,\n 'address_invoice_id': order.partner_invoice_id.id,\n #'address_contact_id': order.partner_order_id.id,\n 'invoice_line': [(6, 0, lines)],\n 'currency_id': order.pricelist_id.currency_id.id,\n 'comment': order.note,\n 'payment_term': order.payment_term and order.payment_term.id or False,\n 'fiscal_position': order.fiscal_position.id or order.partner_id.property_account_position.id,\n 'date_invoice': context.get('date_invoice', False),\n 'company_id': order.company_id.id,\n 'user_id': order.user_id and order.user_id.id or False\n }\n\n # Care for deprecated _inv_get() hook - FIXME: to be removed after 6.1\n invoice_vals.update(self._inv_get(cr, uid, order, context=context))\n\n return invoice_vals", "def add_investment():\n\n company_name = request.args.get('company-name')\n date_of_entry = datetime.datetime.today().strftime('%Y-%m-%d')\n \n input_quantity = request.args.get('quantity')\n quantity = int(str(input_quantity).replace(',', ''))\n \n input_cost = request.args.get('cost')\n cost = int(str(input_cost).replace(',', ''))\n\n date_of_investment = request.args.get('date')\n\n new_inv = Investment(date_of_entry=date_of_entry, \n date_of_investment=date_of_investment,\n company_name=company_name, \n quantity=quantity, \n cost=cost)\n \n db.session.add(new_inv)\n db.session.commit()\n\n user_id = session['user']\n new_inv_id = new_inv.inv_id\n\n\n new_userinv = UserInv(inv_id=new_inv_id,\n user_id=user_id)\n db.session.add(new_userinv)\n db.session.commit()\n\n return jsonify('investment added!')", "def create_new_invoice(month, year, document, description, amount):\n try:\n conn = sqlite3.connect(DATABASE)\n cursor = conn.cursor()\n\n is_active = True\n now = datetime.now()\n query = '''\n INSERT INTO invoice\n (ReferenceMonth, ReferenceYear, Document, Description, Amount, IsActive, CreatedAt)\n values\n (?, ?, ?, ?, ?, ?, ?)'''\n cursor.execute(query, (month, year, document, description, amount, is_active, now))\n conn.commit()\n cursor.close()\n return True\n except:\n return False", "def invoice(self, invoice_number):\r\n return inv.Invoice(self, invoice_number)", "def invoice_print(self):\n self.ensure_one()\n self.sent = True\n return self.env['report'].get_action(self, 'ferrua_report.report_invoice')", "def add_invoice() -> str:\r\n invoice_details = []\r\n #Catching values user has entered in UI\r\n invoice_number = request.args.get(\"invoice_number\")\r\n invoice_details.append(invoice_number)\r\n customer = request.args.get(\"customer\")\r\n invoice_details.append(customer)\r\n date_required = request.args.get(\"date_required\")\r\n invoice_details.append(date_required)\r\n recipe = request.args.get(\"recipe\")\r\n invoice_details.append(recipe)\r\n gyle_number = request.args.get(\"gyle_number\")\r\n invoice_details.append(gyle_number)\r\n quantity_ordered = request.args.get(\"quantity_ordered\")\r\n invoice_details.append(quantity_ordered)\r\n #Passing list to function which writes list to CSV file\r\n data_add(invoice_details)\r\n 
invoice_message = \"INVOICE ADDED\"\r\n return render_template(\"singular_message.html\",\r\n user_display=invoice_message)", "def test_add_recurring_schedule(self):\n pass", "def invoice_factory(session, contact):\n class InvoiceFactory():\n def get(self, invoice_number='2016-1', contact_alias='test',\n amount='5000', date='2016-03-05',\n sales_tax=19, afa=None,\n gwg=None, invoice_type='expense',\n invoice_file=None, invoice_extension=None):\n \"\"\"Return an Invoice.\"\"\"\n invoice = Invoice(\n invoice_number, contact_alias, amount, date,\n sales_tax=sales_tax, afa=afa,\n gwg=gwg, invoice_type=invoice_type,\n invoice_file=invoice_file, invoice_extension=invoice_extension)\n session.add(invoice)\n session.commit()\n return invoice\n return InvoiceFactory()", "def action_view_invoice_salon(self):\n return {\n 'name': 'Invoices',\n 'domain': [('invoice_origin', '=', self.name)],\n 'res_model': 'account.move',\n 'view_id': False,\n 'view_mode': 'tree,form',\n 'type': 'ir.actions.act_window',\n }", "def invoice(self, reference_no=None, with_vat=True):\n\n return self.invoice_class(apiobj=self, reference_no=reference_no)", "def abc_confirm_invoice(self, lines, packages, data, params, res):\n invoice = params.get('invoice')\n if invoice and invoice.state == 'draft':\n self.env.cr.commit()\n env = None\n try:\n # Ne cursor doesn't time out when requesting lock.\n # Could be bad I guess? Works for now.\n # TODO: Look into setting a more reasonable lock wait time.\n new_cr = Registry(self.env.cr.dbname).cursor()\n new_cr.autocommit(True)\n env = api.Environment(new_cr, self.env.uid, self.env.context)\n # Validate invoice\n invoice.signal_workflow('invoice_open')\n res['invoice']['name'] = invoice.number\n res['messages'].append(u\"Created and confirmed invoice %s.\" % invoice.number)\n res['results']['invoice'] = 'confirmed'\n # Commit to unlock the invoice sequence\n env.cr.commit()\n except Exception as e:\n res['warnings'].append((\n _(u\"Failed to confirm invoice %s!\") % (invoice and (invoice.number or invoice.name) or 'Unknown'),\n '%s\\n\\nTraceback:\\n%s' % (e.message or 'Unknown Error', traceback.format_exc())))\n finally:\n if env:\n env.cr.close()", "def obj_create(self, bundle, **kwargs):\n logger.info(\"Creating a new acknowledgement...\")\n #Create the object\n bundle.obj = Acknowledgement()\n #hydrate\n bundle = self.full_hydrate(bundle)\n \n #Set the customer\n try:\n logger.info(\"Setting customer...\")\n bundle.obj.customer = Customer.objects.get(pk=bundle.data[\"customer\"][\"id\"])\n bundle.obj.discount = bundle.obj.customer.discount\n except:\n logger.error(\"Customer with ID {0} could not be found.\".format(bundle.data['customer']['id']))\n raise\n \n #Set the employee\n try:\n logger.info(\"Setting employee...\")\n bundle.obj.employee = bundle.request.user\n except User.DoesNotExist:\n logger.error(\"User with ID {0} could not be found\".format(bundle.data['employee']['id']))\n raise\n except KeyError:\n logger.critical(\"Missing employee ID.\")\n raise\n \n #Set Status\n bundle.obj.status = \"ACKNOWLEDGED\"\n \n #Set the project or create a new one\n if \"project\" in bundle.data:\n try:\n project = Project.objects.get(pk=bundle.data['project']['id'])\n except KeyError, Project.DoesNotExist:\n try:\n project = Project()\n project.codename = bundle.data['project']['codename']\n project.save()\n except KeyError:\n project = None\n \n bundle.obj.project = project\n \n #Create items without saving them \n logger.info(\"Creating items...\")\n self.items = 
[Item.create(acknowledgement=bundle.obj,\n commit=False,\n **product) for product in bundle.data[\"items\"]]\n \n #Calculate the total price\n logger.info(\"Calculating balance of the order...\")\n bundle.obj.calculate_totals(self.items)\n bundle = self.save(bundle)\n \n #Save the items\n logger.info(\"Saving the items to the database...\")\n for item in self.items:\n item.acknowledgement = bundle.obj\n item.save()\n \n log_message = \"Ack {0} created on {1}. Scheduled to be delivered on {2}\"\n log_message = log_message.format(bundle.obj.id,\n bundle.obj.time_created.strftime('%B %d, %Y'),\n bundle.obj.delivery_date.strftime('%B %d, %Y'))\n log = Log(message=log_message,\n delivery_date=bundle.obj.delivery_date,\n acknowledgement=bundle.obj)\n log.save()\n #Create and upload the pdfs to the \n #S3 system. Then save the pdfs as\n #Attributes of the acknowledgement\n logger.info(\"Creating PDF documents...\")\n bundle.obj.create_and_upload_pdfs()\n \n \n #Add the url of the pdf to the outgoing data\n #only for when an acknowledgement is created\n try:\n ack = bundle.obj.acknowledgement_pdf\n production = bundle.obj.production_pdf\n bundle.data['pdf'] = {'acknowledgement': ack.generate_url(),\n 'production': production.generate_url()}\n except AttributeError: \n logger.warn('Missing acknowledgement or production pdf')\n \n #Conditionally email ack to Decoroom\n if \"decoroom\" in bundle.obj.customer.name.lower():\n try:\n logger.info(\"Emailing Decoroom Co., Ltd. the order details...\")\n bundle.obj.email_decoroom()\n except Exception as e:\n logger.error(\"Unable to mail decoroom.\")\n logger.error(e)\n \n \n \n logger.info(u\"Acknowledgement #{0} created for {1}\".format(bundle.obj.id, \n bundle.obj.customer.name)) \n return bundle", "def action_move_create(self):\n\t\taccount_move = self.env['account.move']\n\n\t\tfor inv in self:\n\t\t\tif not inv.journal_id.sequence_id:\n\t\t\t\traise UserError(_('Please define sequence on the journal related to this invoice.'))\n\t\t\tif not inv.invoice_line_ids.filtered(lambda line: line.account_id):\n\t\t\t\traise UserError(_('Please add at least one invoice line.'))\n\t\t\tif inv.move_id:\n\t\t\t\tcontinue\n\n\n\t\t\tif not inv.date_invoice:\n\t\t\t\tinv.write({'date_invoice': fields.Date.context_today(self)})\n\t\t\tif not inv.date_due:\n\t\t\t\tinv.write({'date_due': inv.date_invoice})\n\t\t\tcompany_currency = inv.company_id.currency_id\n\n\t\t\t# create move lines (one per invoice line + eventual taxes and analytic lines)\n\t\t\timl = inv.invoice_line_move_line_get()\n\t\t\timl += inv.tax_line_move_line_get()\n\n\t\t\tdiff_currency = inv.currency_id != company_currency\n\t\t\t# create one move line for the total and possibly adjust the other lines amount\n\t\t\ttotal, total_currency, iml = inv.compute_invoice_totals(company_currency, iml)\n\n\t\t\tname = inv.name or ''\n\t\t\tif inv.payment_term_id:\n\t\t\t\ttotlines = inv.payment_term_id.with_context(currency_id=company_currency.id).compute(total, inv.date_invoice)[0]\n\t\t\t\tres_amount_currency = total_currency\n\t\t\t\tfor i, t in enumerate(totlines):\n\t\t\t\t\tif inv.currency_id != company_currency:\n\t\t\t\t\t\tamount_currency = company_currency._convert(t[1], inv.currency_id, inv.company_id, inv._get_currency_rate_date() or fields.Date.today())\n\t\t\t\t\telse:\n\t\t\t\t\t\tamount_currency = False\n\n\t\t\t\t\t# last line: add the diff\n\t\t\t\t\tres_amount_currency -= amount_currency or 0\n\t\t\t\t\tif i + 1 == len(totlines):\n\t\t\t\t\t\tamount_currency += 
res_amount_currency\n\n\t\t\t\t\t_logger.info(inv)\n\t\t\t\t\timl.append({\n\t\t\t\t\t\t'type': 'dest',\n\t\t\t\t\t\t'name': name,\n\t\t\t\t\t\t'price': t[1],\n\t\t\t\t\t\t'account_id': inv.account_id.id,\n\t\t\t\t\t\t'date_maturity': t[0],\n\t\t\t\t\t\t'amount_currency': diff_currency and amount_currency,\n\t\t\t\t\t\t'currency_id': diff_currency and inv.currency_id.id,\n\t\t\t\t\t\t'invoice_id': inv.id,\n\t\t\t\t\t\t#'partner_id': inv.partner_line_id.id\n\t\t\t\t\t})\n\t\t\telse:\n\t\t\t\t_logger.info(inv)\n\t\t\t\ttotal_taxes_to_pay = self.return_tax_to_payy()\n\n\t\t\t\tif inv.taxes_collected_id.type_taxes == 'tax_company':\n\t\t\t\t\timl.append({\n\t\t\t\t\t'type': 'dest',\n\t\t\t\t\t'name': name,\n\t\t\t\t\t'price': total_taxes_to_pay,\n\t\t\t\t\t'account_id': inv.taxes_collected_id.account_id.id,\n\t\t\t\t\t'date_maturity': inv.date_due,\n\t\t\t\t\t'amount_currency': diff_currency and total_currency,\n\t\t\t\t\t'currency_id': diff_currency and inv.currency_id.id,\n\t\t\t\t\t'invoice_id': inv.id,\n\t\t\t\t\t#'partner_id': inv.partner_line_id.id\n\t\t\t\t\t})\n\t\t\t\t\timl.append({\n\t\t\t\t\t'type': 'dest',\n\t\t\t\t\t'name': name,\n\t\t\t\t\t'price': total- total_taxes_to_pay,\n\t\t\t\t\t'account_id': inv.account_id.id,\n\t\t\t\t\t'date_maturity': inv.date_due,\n\t\t\t\t\t'amount_currency': diff_currency and total_currency,\n\t\t\t\t\t'currency_id': diff_currency and inv.currency_id.id,\n\t\t\t\t\t'invoice_id': inv.id,\n\t\t\t\t\t#'partner_id': inv.partner_line_id.id\n\t\t\t\t\t})\n\n\t\t\t\telse:\n\t\t\t\t\timl.append({\n\t\t\t\t\t'type': 'dest',\n\t\t\t\t\t'name': name,\n\t\t\t\t\t'price': total,\n\t\t\t\t\t'account_id': inv.account_id.id,\n\t\t\t\t\t'date_maturity': inv.date_due,\n\t\t\t\t\t'amount_currency': diff_currency and total_currency,\n\t\t\t\t\t'currency_id': diff_currency and inv.currency_id.id,\n\t\t\t\t\t'invoice_id': inv.id,\n\t\t\t\t\t#'partner_id': inv.partner_line_id.id\n\t\t\t\t})\n\n\t\t\tpart = self.env['res.partner']._find_accounting_partner(inv.partner_id)\n\n\t\t\t# validate that it is a vendor invoice\n\t\t\tif self.type == 'in_invoice':\n\t\t\t\tdata_new = []\n\t\t\t\tfor l in iml:\n\t\t\t\t\tif 'partner_id' in l:\n\t\t\t\t\t\tif l['partner_id']:\n\t\t\t\t\t\t\tdata_new.append((0, 0, self.line_get_convert(l, l['partner_id'])) )\n\t\t\t\t\telse:\n\t\t\t\t\t\tdata_new.append((0, 0, self.line_get_convert(l, part.id)) )\n\n\t\t\t\tline = [l for l in data_new ]\n\t\t\telse:\n\t\t\t\tline = [(0, 0, self.line_get_convert(l, part.id)) for l in iml ]\n\n\t\t\tline = inv.group_lines(iml, line)\n\n\t\t\tline = inv.finalize_invoice_move_lines(line)\n\n\t\t\tdate = inv.date or inv.date_invoice\n\t\t\tmove_vals = {\n\t\t\t\t'ref': inv.reference,\n\t\t\t\t'line_ids': line,\n\t\t\t\t'journal_id': inv.journal_id.id,\n\t\t\t\t'date': date,\n\t\t\t\t'narration': inv.comment,\n\t\t\t}\n\n\t\t\tmove = account_move.create(move_vals)\n\t\t\t# Pass invoice in method post: used if you want to get the same\n\t\t\t# account move reference when creating the same invoice after a cancelled one:\n\t\t\tmove.post(invoice = inv)\n\t\t\t# make the invoice point to that move\n\t\t\tvals = {\n\t\t\t\t'move_id': move.id,\n\t\t\t\t'date': date,\n\t\t\t\t'move_name': move.name,\n\t\t\t}\n\t\t\tinv.write(vals)\n\t\treturn True", "def test_invoice(self):\n invoice = self._create_invoice()\n self.assertEquals(invoice.total_amount, Decimal('2.38'))\n self.assertEquals(invoice.is_paid, False)\n\n # then cancel the created invoice\n cancelled_invoice = cancel_invoice(invoice)\n 
self.assertEquals(cancelled_invoice.total_amount, Decimal('-2.38'))", "def action_move_create(self, cr, uid, ids, context=None):\n ait_obj = self.pool.get('account.invoice.tax')\n cur_obj = self.pool.get('res.currency')\n period_obj = self.pool.get('account.period')\n payment_term_obj = self.pool.get('account.payment.term')\n journal_obj = self.pool.get('account.journal')\n move_obj = self.pool.get('account.move')\n if context is None:\n context = {}\n for inv in self.browse(cr, uid, ids, context=context):\n if not inv.journal_id.sequence_id:\n raise osv.except_osv(_('Error!'), _('Please define sequence on the journal related to this invoice.'))\n if not inv.invoice_line:\n raise osv.except_osv(_('No Invoice Lines!'), _('Please create some invoice lines.'))\n if inv.move_id:\n continue\n\n ctx = context.copy()\n ctx.update({'lang': inv.partner_id.lang})\n if not inv.date_invoice:\n self.write(cr, uid, [inv.id], {'date_invoice': fields.date.context_today(self,cr,uid,context=context)}, context=ctx)\n company_currency = self.pool['res.company'].browse(cr, uid, inv.company_id.id).currency_id.id\n # create the analytical lines\n # one move line per invoice line\n iml = self._get_analytic_lines(cr, uid, inv.id, context=ctx)\n # check if taxes are all computed\n compute_taxes = ait_obj.compute(cr, uid, inv.id, context=ctx)\n self.check_tax_lines(cr, uid, inv, compute_taxes, ait_obj)\n\n # I disabled the check_total feature\n group_check_total_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'account', 'group_supplier_inv_check_total')[1]\n group_check_total = self.pool.get('res.groups').browse(cr, uid, group_check_total_id, context=context)\n if group_check_total and uid in [x.id for x in group_check_total.users]:\n if (inv.type in ('in_invoice', 'in_refund') and abs(inv.check_total - inv.amount_total) >= (inv.currency_id.rounding/2.0)):\n raise osv.except_osv(_('Bad Total!'), _('Please verify the price of the invoice!\\nThe encoded total does not match the computed total.'))\n\n if inv.payment_term:\n total_fixed = total_percent = 0\n for line in inv.payment_term.line_ids:\n if line.value == 'fixed':\n total_fixed += line.value_amount\n if line.value == 'procent':\n total_percent += line.value_amount\n total_fixed = (total_fixed * 100) / (inv.amount_total or 1.0)\n if (total_fixed + total_percent) > 100:\n raise osv.except_osv(_('Error!'), _(\"Cannot create the invoice.\\nThe related payment term is probably misconfigured as it gives a computed amount greater than the total invoiced amount. 
In order to avoid rounding issues, the latest line of your payment term must be of type 'balance'.\"))\n\n # one move line per tax line\n iml += ait_obj.move_line_get(cr, uid, inv.id)\n\n entry_type = ''\n if inv.type in ('in_invoice', 'in_refund'):\n ref = inv.reference\n entry_type = 'journal_pur_voucher'\n if inv.type == 'in_refund':\n entry_type = 'cont_voucher'\n else:\n ref = self._convert_ref(cr, uid, inv.number)\n entry_type = 'journal_sale_vou'\n if inv.type == 'out_refund':\n entry_type = 'cont_voucher'\n\n diff_currency_p = inv.currency_id.id <> company_currency\n # create one move line for the total and possibly adjust the other lines amount\n total = 0\n total_currency = 0\n total, total_currency, iml = self.compute_invoice_totals(cr, uid, inv, company_currency, ref, iml, context=ctx)\n acc_id = inv.account_id.id\n\n name = inv['name'] or inv['supplier_invoice_number'] or '/'\n totlines = False\n # kittiu\n #if inv.payment_term:\n if inv.payment_term and not inv.date_due:\n # --\n totlines = payment_term_obj.compute(cr,\n uid, inv.payment_term.id, total, inv.date_invoice or False, context=ctx)\n if totlines:\n res_amount_currency = total_currency\n i = 0\n ctx.update({'date': inv.date_invoice})\n for t in totlines:\n if inv.currency_id.id != company_currency:\n amount_currency = cur_obj.compute(cr, uid, company_currency, inv.currency_id.id, t[1], context=ctx)\n else:\n amount_currency = False\n\n # last line add the diff\n res_amount_currency -= amount_currency or 0\n i += 1\n if i == len(totlines):\n amount_currency += res_amount_currency\n\n iml.append({\n 'type': 'dest',\n 'name': name,\n 'price': t[1],\n 'account_id': acc_id,\n 'date_maturity': t[0],\n 'amount_currency': diff_currency_p \\\n and amount_currency or False,\n 'currency_id': diff_currency_p \\\n and inv.currency_id.id or False,\n 'ref': ref,\n })\n else:\n iml.append({\n 'type': 'dest',\n 'name': name,\n 'price': total,\n 'account_id': acc_id,\n 'date_maturity': inv.date_due or False,\n 'amount_currency': diff_currency_p \\\n and total_currency or False,\n 'currency_id': diff_currency_p \\\n and inv.currency_id.id or False,\n 'ref': ref\n })\n\n date = inv.date_invoice or time.strftime('%Y-%m-%d')\n\n part = self.pool.get(\"res.partner\")._find_accounting_partner(inv.partner_id)\n\n line = map(lambda x:(0,0,self.line_get_convert(cr, uid, x, part.id, date, context=ctx)),iml)\n\n line = self.group_lines(cr, uid, iml, line, inv)\n\n journal_id = inv.journal_id.id\n journal = journal_obj.browse(cr, uid, journal_id, context=ctx)\n if journal.centralisation:\n raise osv.except_osv(_('User Error!'),\n _('You cannot create an invoice on a centralized journal. 
Uncheck the centralized counterpart box in the related journal from the configuration menu.'))\n\n line = self.finalize_invoice_move_lines(cr, uid, inv, line)\n\n move = {\n 'ref': inv.reference and inv.reference or inv.name,\n 'line_id': line,\n 'journal_id': journal_id,\n 'date': date,\n 'narration': inv.comment,\n 'company_id': inv.company_id.id,\n }\n period_id = inv.period_id and inv.period_id.id or False\n ctx.update(company_id=inv.company_id.id,\n account_period_prefer_normal=True)\n if not period_id:\n period_ids = period_obj.find(cr, uid, inv.date_invoice, context=ctx)\n period_id = period_ids and period_ids[0] or False\n if period_id:\n move['period_id'] = period_id\n for i in line:\n i[2]['period_id'] = period_id\n\n ctx.update(invoice=inv)\n move_id = move_obj.create(cr, uid, move, context=ctx)\n new_move_name = move_obj.browse(cr, uid, move_id, context=ctx).name\n # make the invoice point to that move\n self.write(cr, uid, [inv.id], {'move_id': move_id,'period_id':period_id, 'move_name':new_move_name}, context=ctx)\n # Pass invoice in context in method post: used if you want to get the same\n # account move reference when creating the same invoice after a cancelled one:\n move_obj.post(cr, uid, [move_id], context=ctx)\n self._log_event(cr, uid, ids)\n return True", "def save(self, *args, **kwargs):\n if not self.pk:\n self.start_time_rent = datetime.date.today()\n self.end_time_rent = self.start_time_rent + datetime.timedelta(days=7)\n self.reservation.isrented = True\n self.reservation.save()\n return super(Rental, self).save(*args, **kwargs)", "def schedule_reservation(reservation_date,reservation_time,party_size,restaurant_name,first_name,restaurant_address):\n creds = None\n # The file token.pickle stores the user's access and refresh tokens, and is\n # created automatically when the authorization flow completes for the first\n # time.\n if os.path.exists('token.pickle'):\n with open('token.pickle', 'rb') as token:\n creds = pickle.load(token)\n # If there are no (valid) credentials available, let the user log in.\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file(\n 'credentials.json', SCOPES)\n creds = flow.run_local_server(port=0)\n # Save the credentials for the next run\n with open('token.pickle', 'wb') as token:\n pickle.dump(creds, token)\n\n service = build('calendar', 'v3', credentials=creds)\n\n # Call the Calendar API\n now = datetime.datetime.utcnow()\n\n reservation_day=reservation_date.split('/')[0]\n reservation_month =reservation_date.split('/')[1]\n reservation_year =reservation_date.split('/')[2]\n reservation_date = reservation_year+'-'+reservation_month+'-'+reservation_day\n start_time_hr= reservation_time[:2]\n end_time_hr= int(reservation_time[:2])+4\n start_time_min= reservation_time[2:]\n end_time_min=start_time_min\n \n \n event = {\n 'summary': 'Reservation at '+restaurant_name,\n 'location': restaurant_address,\n 'description': 'Reservation for '+party_size+' under '+first_name+' made on '+str(now),\n 'start': {\n 'dateTime': reservation_date+'T'+start_time_hr+':'+start_time_min+':00+08:00',\n 'timeZone': 'Asia/Singapore',\n },\n 'end': {\n 'dateTime': reservation_date+'T'+str(end_time_hr)+':'+end_time_min+':00+08:00',\n 'timeZone': 'Asia/Singapore',\n },\n 'reminders': {\n 'useDefault': False,\n 'overrides': [\n {'method': 'email', 'minutes': 24 * 60},\n {'method': 'popup', 'minutes': 10},\n ],\n },\n }\n\n event = 
service.events().insert(calendarId='primary', body=event).execute()\n print ('Event created: %s', (event.get('htmlLink')))", "def invoice(self,context,params):\n url = f\"https://api.freshbooks.com/accounting/account/{params['account_id']}/invoices/invoices/{params['invoice_id']}\"\n result = json.loads(util.rest(\"GET\", url, {}, context[\"headers\"][\"access_token\"]).text)\n invoice = result[\"response\"][\"result\"][\"invoice\"]\n invoice_obj = FreshbooksInvoice(\n account_id=invoice['accountid'],\n customerid=invoice['customerid'], \n invoice_id=invoice['invoiceid'],\n currency_code=invoice['currency_code'],\n language=invoice['language'],\n terms=invoice['terms'],\n discount_value=invoice['discount_value'],\n discount_amount=invoice['discount_total']['amount'],\n invoice_number=invoice['invoice_number'],\n po_number=invoice['po_number'],\n amount=invoice['amount']['amount'],\n code=invoice['amount']['code'],\n create_date=invoice['create_date']\n )\n return invoice_obj.__dict__", "def test_get_invoice(self):\n invoice = Invoice(self.client, 123, {})\n\n self.assertEqual(invoice.date, datetime(2018, 1, 1, 0, 1, 1))\n self.assertEqual(invoice.id, 123)\n self.assertEqual(invoice.label, \"Invoice\")\n self.assertEqual(invoice.subtotal, 120.25)\n self.assertEqual(invoice.tax, 12.25)\n self.assertEqual(invoice.total, 132.5)\n self.assertIsNotNone(invoice.tax_summary)", "def create_purchase_order(self, cr, uid, ids, context=None):\n sale_obj = self.pool.get('sale.order')\n act_window = self.pool.get('ir.actions.act_window')\n wizard = self.browse(cr, uid, ids[0], context)\n sale_ids = context.get('active_ids', [])\n if wizard.advance_purchase_order == 'all':\n # create the final invoices of the active sales orders\n res = sale_obj.manual_purchase_order(cr, uid, sale_ids, context)\n \n return {'type': 'ir.actions.act_window_close'}\n\n if wizard.advance_purchase_order == 'lines':\n # open the list view of sales order lines to invoice\n res = act_window.for_xml_id(cr, uid, 'sale', 'action_order_line_tree2', context)\n res['context'] = {\n \n 'search_default_order_id': sale_ids and sale_ids[0] or False,\n }\n return res \n\n inv_ids = []\n for sale_id, inv_values in self._prepare_advance_po_vals(cr, uid, ids, context=context):\n inv_ids.append(self._create_purchase_order(cr, uid, inv_values, sale_id, context=context))\n\n \n return {'type': 'ir.actions.act_window_close'}", "def _prepare_invoice_line(self, qty):\n self.ensure_one()\n res = {\n 'name': self.name,\n 'sequence': self.sequence,\n 'origin': self.order_id.name,\n 'account_id': self.product_id.product_tmpl_id._get_product_accounts()['stock_input'].id,\n 'price_unit': self.price_unit,\n 'quantity': qty,\n 'uom_id': self.product_uom.id,\n 'product_id': self.product_id.id or False,\n 'invoice_line_tax_ids': [(6, 0, self.taxes_id.ids)],\n 'account_analytic_id': self.account_analytic_id.id,\n 'analytic_tag_ids': [(6, 0, self.analytic_tag_ids.ids)],\n }\n return res", "def create( self ):\r\n for rsrc in self.ee.getRsrcs( ):\r\n self.schedule[rsrc.getid( )] = [ ]", "def createCalendarEvent(self, ISBN, username):\n\n date = datetime.now()\n dueDate = (date + timedelta(days=7)).strftime(\"%Y-%m-%d\")\n time_start = \"{}T09:00:00+10:00\".format(dueDate)\n time_end = \"{}T10:00:00+10:00\".format(dueDate)\n eventID = ISBN+username.lower()\n\n event = {\n \"summary\": ISBN,\n \"id\": eventID,\n \"location\": \"RMIT Building 14\",\n \"description\": \"Book Due to be Returned\",\n \"start\": {\n \"dateTime\": time_start,\n \"timeZone\": 
\"Australia/Melbourne\",\n },\n \"end\": {\n \"dateTime\": time_end,\n \"timeZone\": \"Australia/Melbourne\",\n },\n \"attendees\": [\n {\"email\": \"[email protected]\"},\n {\"email\": \"[email protected]\"},\n ],\n \"reminders\": {\n \"useDefault\": False,\n \"overrides\": [\n {\"method\": \"email\", \"minutes\": 5},\n {\"method\": \"popup\", \"minutes\": 10},\n ],\n }\n }\n\n event = self.service.events().insert(\n calendarId=\"primary\", body=event).execute()\n return event['id']", "def invoice_line_create(self, invoice_id, qty):\n invoice_lines = self.env['account.invoice.line']\n precision = self.env['decimal.precision'].precision_get('Product Unit of Measure')\n for line in self:\n if not float_is_zero(qty, precision_digits=precision):\n vals = line._prepare_invoice_line(qty=qty)\n vals.update({'invoice_id': invoice_id, 'purchase_line_id': line.id})\n invoice_lines |= self.env['account.invoice.line'].create(vals)\n return invoice_lines", "def create_new_schedule():\n\n # collect all relevant information from form\n user_id = int(session['user_id'])\n user = User.query.filter_by(user_id=int(session['user_id'])).one()\n contact_form_value = request.form.get('contact_id')\n start_date_unicode = request.form.get('start_date')\n period = int(request.form.get('period'))\n\n # extracts email from contact_form_value string using re library\n contact_email = contact_form_value.partition('<')[-1].rpartition('>')[0]\n\n # pull contact from database\n contact = Contact.query.filter_by(email=contact_email).one()\n contact_id = contact.contact_id\n\n # turns start_date into datetime object using dateutil library\n start_date = parser.parse(start_date_unicode)\n\n # calculates send_date from start_date and period\n send_date = start_date + datetime.timedelta(days=period)\n\n # write scheduled message to database\n new_scheduled_msg = ScheduledMessage(user_id=user_id, \n contact_id=contact_id,\n send_date=send_date,\n sent=False)\n\n # set new period on contact in database\n contact.contact_period = period\n\n db.session.add(new_scheduled_msg)\n db.session.commit()\n\n print 'user_id:', user_id\n print 'contact_form_value:', contact_form_value\n print 'start_date:', start_date, 'type:', type(start_date)\n print 'contact_email:', contact_email\n print 'contact:', contact\n print 'contact_id:', contact.contact_id\n print 'period:', period\n print 'send_date:', send_date\n return jsonify({})", "def test_create_new_order_VIES_fault(self):\n rup = baker.make(\n \"RecurringUserPlan\",\n user_plan__user__billinginfo__country=\"CZ\",\n user_plan__user__billinginfo__tax_number=\"CZ0123\",\n amount=10,\n tax=11,\n )\n with no_connection():\n order = rup.create_renew_order()\n self.assertEqual(order.tax, 11)", "def softm_to_invoice(rechnungsnr):\n from pprint import pprint\n\n if str(rechnungsnr).startswith('RG'):\n rechnungsnr = str(rechnungsnr)[2:]\n rg, orderlines = get_rechnung('RG833645')\n hint = {}\n for attr in 'skontobetrag'.split():\n hint[attr] = rg[attr]\n out = {'hint': hint}\n for attr in '''kundenauftragsnr auftragsnr versandkosten rechnung_steuranteil rechnungsnr\n zu_zahlen'''.split():\n out[attr] = rg[attr]\n\n out['leistungsdatum'] = rg['versand_date']\n out['kundennr'] = rg['kundennr_rechnungsempfaenger']\n out['erfasst_von'] = rg['sachbearbeiternr']\n out['abschlag_prozent'] = rg['auftragsrabatt1p'] + rg['auftragsrabatt2p']\n out['auftragsrabatt'] = rg['auftragsrabatt']\n out['rechungsdatum'] = rg['druck_date']\n rabatttext = ' und '.join([x for x in [rg['rabatttext1'].strip(), 
rg['rabatttext2'].strip()] if x])\n rabatttext = \"\"\n if rabatttext:\n rabatttext = \"%s: %f\" % (rabatttext, out['abschlag_prozent'])\n elif out['abschlag_prozent']:\n rabatttext = u\"Ab/Zuschläge: %f\" % (out['abschlag_prozent'])\n\n out['infotext_kunde'] = '\\n'.join([rabatttext])\n\n out['orderlines'] = []\n for ol in get_connection().query(['AFU00'], condition=\"FURGNR=%s\" % sql_escape(rechnungsnr)):\n pprint(ol)\n outol = {}\n for attr in '''menge artnr abschlag rechungsbetrag warenwert'''.split(): # zu_zahlen\n outol[attr] = ol[attr]\n out['orderlines'].append(outol)\n\n #line = dict(\n # guid=p.guid,\n # menge=int(p.menge),\n # artnr=p.artnr,\n # #kundenartnr=f3.artnr_kunde,\n # #name=f3.artikelbezeichnung.strip(),\n # infotext_kunde=p.text\n # #einzelpreis=int(abs(f3.verkaufspreis)*100),\n # #warenwert=int(p.wert_netto*100),\n # #zu_zahlen=int(abs(f3.wert_brutto)*100),\n # #abschlag=int(f4.positionsrabatt_gesamt*100)\n # )\n\n #if f3.ean and int(f3.ean):\n # line['ean'] = f3.ean", "def create_sequence(self, vals):\n\n # Sequence creation. If the journal type is payment or receipt\n # we build the sequence differently\n journal_type = vals['type']\n\n if journal_type not in ['receipt', 'payment']:\n return super().create_sequence(vals)\n\n # in account.journal code is actually the prefix of the sequence\n # whereas ir.sequence code is a key to lookup global sequences.\n prefix = vals['code'].upper()\n\n seq = {\n 'name': vals['name'],\n 'implementation': 'no_gap',\n 'prefix': prefix + '-',\n 'padding': 8,\n 'number_increment': 1\n }\n if 'company_id' in vals:\n seq['company_id'] = vals['company_id']\n sequence = self.env['ir.sequence'].create(seq)\n return sequence.id", "def invoice(self, amount: float, currency: int, shop_order_id: str,\n payway: str, extra_fields: dict or None = None) -> dict:\n\n required_fields = ['amount', 'currency', 'payway', 'shop_id', 'shop_order_id']\n req_dict = {\n \"amount\": amount,\n \"currency\": currency,\n \"shop_id\": self.shop_id,\n \"payway\": payway,\n \"shop_order_id\": shop_order_id\n }\n if extra_fields is not None:\n self._check_extra_fields_keys(extra_fields, req_dict)\n req_dict.update(extra_fields)\n req_dict.update({'sign': self._sign(req_dict, required_fields)})\n return super().post('invoice/create', req_dict, headers=self.headers)", "def _MakeCreateRequest(args, messages, resources, project,\n future_reservation_ref):\n future_reservation = util.MakeFutureReservationMessageFromArgs(\n messages, resources, args, future_reservation_ref)\n future_reservation.description = args.description\n future_reservation.namePrefix = args.name_prefix\n\n return messages.ComputeFutureReservationsInsertRequest(\n futureReservation=future_reservation,\n project=project,\n zone=future_reservation_ref.zone)", "def submit_invoices(self, **kwargs) -> ApiResponse:\n \n return self._request(kwargs.pop('path'), data=kwargs)", "def create_new_reservation():\n if not request.json:\n return jsonify({'error': 'no body supplied'}), 400\n\n # look up by date to see if any availability\n res_date = request.json.get('date', None)\n if not res_date:\n error = 'no reservation date supplied'\n flash(error, 'error')\n return jsonify({'error': error}), 400\n\n # check if res time present, if found, convert to DT object\n res_time = request.json.get('time', None)\n if not res_time:\n error = 'no reservation time supplied'\n flash(error, 'error')\n return jsonify({'error': error}), 400\n res_time = time_str_to_obj(res_time)\n\n open_inventory = 
session.query(Inventory).filter_by(date=res_date).all()\n if not open_inventory:\n error = 'no open inventory for date {}'.format(res_date)\n flash(error, 'error')\n return jsonify({'error': error})\n\n error = 'reservation invalid'\n for inv in open_inventory:\n for window in inv.windows:\n if window.current_res_count < window.max_res_count:\n # check if res date falls in current window\n window_start = time_str_to_obj(window.start_time)\n window_end = time_str_to_obj(window.end_time)\n\n # if requested res time is valid, update res count and save res\n if window_start <= res_time < window_end:\n window.current_res_count = window.current_res_count + 1\n session.add(window)\n\n res = Reservation(**request.json)\n session.add(res)\n resp = session.commit()\n if not resp:\n # send message to flask for creation by name\n flash('reservation for {} created'.format(request.json.get('name')), 'success')\n return jsonify({'message': 'reservation for {} created'.format(request.json.get('name'))})\n else:\n error = 'requested reservation time is not available in current inventory'\n else:\n error = 'current inventory window cannot accept additional reservations, please select different time'\n flash(error, 'error')\n return jsonify({'error': error}), 400", "def test_get_invoice(self):\n invoice = Invoice(self.client, 123456)\n self.assertEqual(invoice._populated, False)\n\n self.assertEqual(invoice.label, \"Invoice #123456\")\n self.assertEqual(invoice._populated, True)\n\n self.assertEqual(invoice.date, datetime(2015, 1, 1, 5, 1, 2))\n self.assertEqual(invoice.total, 9.51)", "def create_purchase_requestion(self, cr, uid, ids, context=None):\n #TODO change the state of the purchase requestion to quotes and let the wizard in specific state \n purchase_requestion_obj = self.pool.get('ireq.m')\n exchange = self.pool.get('exchange.order').browse(cr, uid, context['active_id'])\n requestion_lines_obj = self.pool.get('ireq.products')\n prod = self.pool.get('product.product')\n wf_service = netsvc.LocalService(\"workflow\")\n if exchange.purchase_requestion_id:\n raise osv.except_osv(_('Warning'), _('You already created a purchase requisition for this exchange order '))\n for wizard in self.browse(cr, uid, ids):\n requestion_id = purchase_requestion_obj.create(cr, uid, {'company_id': exchange.company_id.id,\n 'user': context['uid'],\n 'cat_id':exchange.category_id.id or False,\n 'ir_ref': exchange.name, \n 'department_id' : exchange.department_id.id,\n 'exchane_order_id':[(4, exchange.id)],})\n for wizard_lines in wizard.products_ids:\n product = prod.browse(cr, uid,wizard_lines.product_id.id)\n requestion_lines_obj.create(cr, uid, {'pr_rq_id':requestion_id,\n 'product_id': wizard_lines.product_id.id,\n 'name': product.name,\n 'product_qty': wizard_lines.product_qty,\n 'product_uom': product.uom_po_id.id, \n 'desc': wizard_lines.description,})\n \n exchange.write({'purchase_requestion_id':requestion_id , 'state' : 'wait_purchase' }) \n wf_service.trg_validate(uid, 'ireq.m', requestion_id, 'draft', cr)\n return requestion_id", "def duplicate_invoice(invoice):\n from invoicer.models import Invoice\n from invoicer.models import LineItem\n\n # copy main attributes\n new_invoice = Invoice(\n company=invoice.company,\n invoice_date=datetime.now(),\n client=invoice.client,\n location=invoice.location,\n tax_rate=invoice.tax_rate,\n left_address=invoice.left_address,\n right_address=invoice.right_address,\n terms=invoice.terms,\n footer=invoice.footer\n )\n new_invoice.save()\n\n # now line items\n for line_item in 
invoice.line_items.all():\n new_invoice.line_items.add(LineItem(\n name=line_item.name,\n description=line_item.description,\n price=line_item.price,\n taxable=line_item.taxable,\n item=line_item.item,\n quantity=line_item.quantity\n ))\n\n return new_invoice", "def create_or_find_b2b_invoices_and_process_ept(self, row, sale_order, invoice_date, tax):\n\n vat_number = row.get('Buyer Tax Registration', False)\n invoice_number = row.get('VAT Invoice Number', False)\n\n invoices = sale_order.invoice_ids.filtered(\n lambda x: x.type == 'out_invoice' and x.state != 'cancel')\n if not invoices:\n lines = sale_order.order_line.filtered(lambda line: line.qty_to_invoice > 0)\n if not lines:\n return False\n invoices = sale_order._create_invoices()\n self.write({'invoice_ids': [(4, invoices and invoices.id)]})\n\n for invoice in invoices:\n if not invoice.partner_id.vat:\n invoice.partner_id.vat = vat_number\n\n payments_lines = []\n if invoice.invoice_payments_widget != 'false':\n payments_dict = json.loads(invoice.invoice_payments_widget)\n payments_content = payments_dict.get('content', [])\n for line in payments_content:\n payments_lines.append(line.get('payment_id', False))\n\n invoice_line = invoice.mapped('invoice_line_ids').filtered(\\\n lambda line: line.tax_ids != tax)\n if invoice_line:\n invoice.button_draft()\n invoice.write({'ref': invoice_number, 'date': invoice_date})\n\n if len(invoice_line) > 1:\n for line in invoice_line:\n line.with_context({'check_move_validity': False}).write( \\\n {'tax_ids': [(6, 0, [tax.id])]})\n else:\n invoice_line.with_context({'check_move_validity': False}).write( \\\n {'tax_ids': [(6, 0, [tax.id])]})\n\n invoice.with_context({'check_move_validity': False})._recompute_tax_lines( \\\n recompute_tax_base_amount=True)\n invoice.action_post()\n for line in payments_lines:\n invoice.js_assign_outstanding_line(line)\n\n return True", "def action_invoice_dian_resend(self):\n self.ensure_one()\n template = self.env.ref('l10n_co_e-invoice.email_template_edi_invoice_dian', False)\n compose_form = self.env.ref('mail.email_compose_message_wizard_form', False)\n ctx = dict(\n default_model='account.invoice',\n default_res_id=self.id,\n default_use_template=bool(template),\n default_template_id=template and template.id or False,\n default_composition_mode='comment',\n mark_invoice_as_sent=True,\n )\n return {\n 'name': _('Compose Email'),\n 'type': 'ir.actions.act_window',\n 'view_type': 'form',\n 'view_mode': 'form',\n 'res_model': 'mail.compose.message',\n 'views': [(compose_form.id, 'form')],\n 'view_id': compose_form.id,\n 'target': 'new',\n 'context': ctx,\n }", "def test_build__generate_ride_object(self) -> None:\n ride: dict = RecurringRideFactory.build()\n\n assert ride['ride'] is not None", "def build_invoice(payment_object):\n # Fill html template with the domain orders and user profile info\n html_template = get_template('billing/billing_invoice.html')\n rendered_html = html_template.render({\n 'payment': payment_object,\n 'user_profile': payment_object.owner.profile,\n })\n # Create pdf file from a html file\n pdfkit.from_string(rendered_html, '/tmp/out.pdf')\n with open(\"/tmp/out.pdf\", \"rb\") as pdf_file:\n pdf_raw = pdf_file.read()\n os.remove(\"/tmp/out.pdf\")\n return {\n 'body': pdf_raw,\n 'filename': 'invoice_{}.pdf'.format(payment_object.transaction_id),\n }", "def create_mass_schedule(self, cr, uid, context=None):\n\n sched_obj = self.pool.get('hr.schedule')\n ee_obj = self.pool.get('hr.employee')\n\n # Create a two-week schedule beginning from 
Monday of next week.\n #\n dt = datetime.today()\n days = 7 - dt.weekday()\n dt += relativedelta(days=+days)\n dStart = dt.date()\n dEnd = dStart + relativedelta(weeks=+2, days=-1)\n\n # Create schedules for each employee in each department\n #\n dept_ids = self.pool.get('hr.department').search(cr, uid, [],\n context=context)\n for dept in self.pool.get('hr.department').browse(cr, uid, dept_ids,\n context=context):\n ee_ids = ee_obj.search(cr, uid, [\n ('department_id', '=', dept.id),\n ], order=\"name\", context=context)\n if len(ee_ids) == 0:\n continue\n\n for ee in ee_obj.browse(cr, uid, ee_ids, context=context):\n\n if (not ee.contract_id\n or not ee.contract_id.schedule_template_id):\n continue\n\n sched = {\n 'name': (ee.name + ': ' + dStart.strftime('%Y-%m-%d') +\n ' Wk ' + str(dStart.isocalendar()[1])),\n 'employee_id': ee.id,\n 'template_id': ee.contract_id.schedule_template_id.id,\n 'date_start': dStart.strftime('%Y-%m-%d'),\n 'date_end': dEnd.strftime('%Y-%m-%d'),\n }\n sched_obj.create(cr, uid, sched, context=context)", "def _prepare_invoice_line(self, inv_id):\n res = {}\n account_id = self.product_id.property_account_income_id.id or self.product_id.categ_id.property_account_income_categ_id.id\n if not account_id:\n raise UserError(_('Please define income account for this product: \"%s\" (id:%d).') % \\\n (self.product_id.name, self.product_id.id,))\n price_unit = self.product_id.lst_price\n res = {\n 'invoice_id': inv_id.id,\n 'name': self.name,\n 'origin': self.order_id.name,\n 'account_id': account_id,\n 'uom_id': self.product_uom_id.id,\n 'quantity': self.product_uom_qty,\n 'price_unit': price_unit,\n 'product_id': self.product_id.id,\n 'invoice_line_tax_id': False,\n 'order_line_id': self.id\n }\n return res", "def _create_schedules(self):\n\n ''''''", "def generate_rapel(self):\n config = self.env['ka_hr_payroll.config'].default_config()\n last_period = self.get_last_period(self.status_id.id, self.company_payroll_id.id, config=config)\n if last_period:\n date_done = datetime.strptime(self.date_done, DATETIME_FORMAT)\n\n if date_done.day > config.date_end:\n date_pay = date_done + relativedelta(months=1)\n else:\n date_pay = date_done\n\n data_rapel = {\n 'new_period_id': self.id,\n 'old_period_id': last_period.id,\n 'date_start': get_utc_timezone(self.date_start + ' 00:00:00'),\n 'date_end': self.date_done,\n 'year_pay': str(date_pay.year),\n 'month_pay': date_pay.month,\n 'status_id': self.status_id.id,\n 'company_payroll_id': self.company_payroll_id.id,\n }\n\n rapel_period = self.env['ka_hr_payroll.rapel.tunjangan.khusus.period'].create(data_rapel)\n self.rapel_id = rapel_period\n\n for line in self.line_ids:\n line.generate_rapel(last_period.id, rapel_period.id)\n\n self.state_rapel = '2'\n self.env.user.notify_info(\"{0}, berhasil dibuat!\".format(rapel_period.name))\n else:\n raise ValidationError(\n \"Tunjangan khusus periode sebelumnya tidak ditemukan! 
Anda tidak bisa melanjutkan aksi ini.\")", "def create(self, validated_data):\n\n courier_obj = validated_data['courier_id']\n assigned_orders = Order.objects.filter(courier_id=courier_obj, complete_time__isnull=True)\n if assigned_orders.exists(): # if current delivery is not over, return uncompleted orders\n response = {\n 'orders': [{'id': order.order_id} for order in assigned_orders],\n 'assign_time': assigned_orders[0].assign_time.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-4] + 'Z'\n }\n return response\n regions = CourierRegions.objects.filter(courier_id=courier_obj).values_list('region', flat=True)\n capacity = courier_obj.courier_type.capacity\n orders = Order.objects.filter(weight__lte=capacity, region__in=regions, courier_id__isnull=True)\n current_time = datetime.now(timezone.utc)\n orders_to_assign = []\n working_hours = WorkingHours.objects.filter(courier_id=courier_obj)\n total_weight = 0\n for order in orders:\n if total_weight + order.weight > capacity:\n break\n delivery_hours = DeliveryHours.objects.filter(order_id=order)\n if work_delivery_intersect(working_hours, delivery_hours):\n orders_to_assign.append(order)\n total_weight += order.weight\n order.courier_id = courier_obj\n order.assign_time = current_time\n order.courier_type = courier_obj.courier_type\n order.delivery_complete = False\n order.save()\n response = {'orders': [{'id': order.order_id} for order in orders_to_assign]}\n if orders_to_assign:\n response['assign_time'] = current_time.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-4] + 'Z'\n return response", "def action_move_create(self):\n\n res = super(account_invoice, self).action_move_create()\n\n for inv in self:\n if not inv.move_id:\n return res\n for ml in inv.move_id.line_id:\n ml_vals = {\n 'emp_police': inv.pol_numpol,\n 'emp_quittance': inv.prm_numero_quittance,\n 'emp_effet': datetime.datetime.strptime(inv.prm_datedeb, '%Y-%m-%d').date() if inv.prm_datedeb else datetime.datetime.today(),\n 'emp_datech': datetime.datetime.strptime(inv.prm_datefin, '%Y-%m-%d').date() if inv.prm_datefin else datetime.datetime.today(),\n }\n ml.update(ml_vals)\n move_vals = {\n 'num_police': inv.pol_numpol,\n 'num_quittance': inv.prm_numero_quittance,\n 'date_effect': datetime.datetime.strptime(inv.prm_datedeb, '%Y-%m-%d').date() if inv.prm_datedeb else datetime.datetime.today(),\n 'date_end': datetime.datetime.strptime(inv.prm_datefin, '%Y-%m-%d').date() if inv.prm_datefin else datetime.datetime.today(),\n }\n inv.move_id.update(move_vals)\n self._log_event()\n return res", "def test_new_empty_invoice_address(self):\r\n self.original = self.env[\"res.partner\"].create({\r\n \"is_company\": False,\r\n \"type\": 'invoice',\r\n \"lastname\": \"\",\r\n \"firstname\": \"\"})", "def _prepare_invoice(self, invoice_type):\n return {\n 'partner_id': self.picking_id.partner_id.id,\n 'company_id': self.picking_id.company_id.id,\n 'type': invoice_type,\n 'name': _('Exchange Inv for %s') % self.picking_id.name,\n 'currency_id': self.env.user.company_id.currency_id.id,\n }", "def action_move_create(self):\n account_move = self.env[\"account.move\"]\n\n for request in self:\n if not request.journal_id:\n raise UserError(\n _(\n \"Please define a journal for this request.\"\n )\n )\n if not request.journal_id:\n raise UserError(\n _(\n \"Please define sequence on the journal related to this request.\"\n )\n )\n if any(\n request.approve_request_ids.filtered(\n lambda line: not line.account_id\n )\n ):\n raise UserError(\n _(\n \"There is a line without any account. 
Please configure a stock account \"\n \"for all product categories that have products on the lines\"\n )\n )\n if not request.approve_request_ids:\n raise UserError(_(\"Please add at least one line!\"))\n if request.move_id:\n continue\n\n company_currency = request.company_id.currency_id\n partner_id = request.end_user.user_id.partner_id.id\n iml = request.approve_request_line_move_line_get()\n name = request.name or \"\"\n credit = 0.0\n debit = reduce(\n lambda x, y: x + y, [line.get(\"credit\", 0.0) for line in iml]\n )\n\n iml.append(\n {\n \"name\": self.name or \"/\",\n \"account_id\": request.account_id.id,\n \"currency_id\": company_currency.id,\n \"date_maturity\": fields.Date.context_today(self),\n \"debit\": debit,\n \"credit\": credit,\n \"partner_id\": partner_id,\n }\n )\n\n iml = [(0, 0, line_item) for line_item in iml]\n move_vals = {\n \"ref\": request.name,\n \"line_ids\": iml,\n \"name\": self.name or \"/\",\n \"journal_id\": request.journal_id.id,\n \"date\": fields.Date.context_today(self),\n \"partner_id\": partner_id,\n \"narration\": request.name,\n }\n move = account_move.with_context(check_move_validity=False).create(\n move_vals\n )\n move.post()\n vals = {\n \"move_id\": move.id,\n \"move_name\": move.name,\n }\n request.write(vals)\n return True", "def income_report_gen(start, end):\n payments = get_income(start, end)\n row_title = [\"Name\", \"Boat\", \"Rent Day\", \"Pay Day\", \"Amount\"]\n data = []\n for payment in payments:\n temp = []\n for title, value in payment.items():\n temp.append(str(value))\n data.append(temp)\n row_format = \"{:>15}\" * (len(row_title)+1)\n print(row_format.format(\"\", *row_title))\n total_income = 0\n for i in range(len(data)):\n print(row_format.format(i+1, *data[i]))\n total_income += int(data[i][4])\n print(row_format.format(\"SUM\", *([\"--------------\"] * 4), str(total_income)))", "def parse_from_event(cls, payload):\n data = payload['data']['object']\n plan_info = data['lines']['data'][0]['plan']\n\n period_start_on = datetime.datetime.utcfromtimestamp(\n data['lines']['data'][0]['period']['start']).date()\n period_end_on = datetime.datetime.utcfromtimestamp(\n data['lines']['data'][0]['period']['end']).date()\n\n invoice = {\n 'payment_id': data['customer'],\n 'plan': plan_info['name'],\n 'receipt_number': data['receipt_number'],\n 'description': plan_info['statement_descriptor'],\n 'period_start_on': period_start_on,\n 'period_end_on': period_end_on,\n 'currency': data['currency'],\n 'tax': data['tax'],\n 'tax_percent': data['tax_percent'],\n 'total': data['total']\n }\n\n return invoice", "def test_invoice_detail(self):\n # first we create a customer\n id = self._create_model(\"customer\", self.customer_data, [\"name\", \"email\", \"phone\"])\n if id:\n # then we can create the invoice\n data = self.invoice_data\n data[\"customer_id\"] = id\n id_inv = self._create_model(\"invoice\", data, [])\n if id_inv:\n # then performing detail\n self._detail_model(\"invoice\", self.invoice_data, id, [])\n self.assertIsNotNone(id_inv)\n self.assertIsNotNone(id)", "def create_report_schedule(self, **kwargs):\n return CreateReportScheduleResponse(**self._request(kwargs.pop('path'), data=kwargs).json())", "def _action_procurement_create(self):\n precision = self.env['decimal.precision'].precision_get('Product Unit of Measure')\n new_procs = self.env['procurement.order'] #Empty recordset\n for line in self:\n if line.state != 'sale' or not line.product_id._need_procurement():\n continue\n qty = 0.0\n for proc in 
line.procurement_ids:\n qty += proc.product_qty\n if float_compare(qty, line.product_uom_qty, precision_digits=precision) >= 0:\n continue\n\n if not line.order_id.procurement_group_id:\n vals = line.order_id._prepare_procurement_group()\n line.order_id.procurement_group_id = self.env[\"procurement.group\"].create(vals)\n\n vals = line._prepare_order_line_procurement(\n group_id=line.order_id.procurement_group_id.id)\n vals['product_qty'] = line.product_uom_qty - qty\n new_proc = self.env[\"procurement.order\"].with_context(\n procurement_autorun_defer=True,\n ).create(vals)\n # Do one by one because we need to pass specific context values\n new_proc.with_context(\n width=line.origin_width,\n height=line.origin_height).run()\n new_procs += new_proc\n return new_procs", "def test_sale_service(self):\n sale_order_vals = {\n 'partner_id': self.partner_usd.id,\n 'partner_invoice_id': self.partner_usd.id,\n 'partner_shipping_id': self.partner_usd.id,\n 'order_line': [(0, 0, {\n 'name': self.product_delivery_timesheet2.name,\n 'product_id': self.product_delivery_timesheet2.id,\n 'product_uom_qty': 50,\n 'product_uom': self.product_delivery_timesheet2.uom_id.id,\n 'price_unit': self.product_delivery_timesheet2.list_price\n }),\n ],\n 'pricelist_id': self.pricelist_usd.id,\n }\n sale_order = self.env['sale.order'].create(sale_order_vals)\n sale_order.order_line._compute_product_updatable()\n self.assertTrue(sale_order.order_line[0].product_updatable)\n sale_order.action_confirm()\n sale_order.order_line._compute_product_updatable()\n self.assertFalse(sale_order.order_line[0].product_updatable)\n self.assertEqual(sale_order.invoice_status, 'no', 'Sale Service: there should be nothing to invoice after validation')\n\n # check task creation\n project = self.project_global\n task = project.task_ids.filtered(lambda t: t.name == '%s:%s' % (sale_order.name, self.product_delivery_timesheet2.name))\n self.assertTrue(task, 'Sale Service: task is not created')\n self.assertEqual(task.partner_id, sale_order.partner_id, 'Sale Service: customer should be the same on task and on SO')\n # register timesheet on task\n self.env['account.analytic.line'].create({\n 'name': 'Test Line',\n 'project_id': project.id,\n 'task_id': task.id,\n 'unit_amount': 50,\n 'employee_id': self.employee_manager.id,\n })\n self.assertEqual(sale_order.invoice_status, 'to invoice', 'Sale Service: there should be something to invoice after registering timesheets')\n sale_order.action_invoice_create()\n line = sale_order.order_line\n self.assertTrue(line.product_uom_qty == line.qty_delivered == line.qty_invoiced, 'Sale Service: line should be invoiced completely')\n self.assertEqual(sale_order.invoice_status, 'invoiced', 'Sale Service: SO should be invoiced')\n self.assertEqual(sale_order.tasks_count, 1, \"A task should have been created on SO confirmation.\")\n\n # Add a line on the confirmed SO, and it should generate a new task directly\n product_service_task = self.env['product.product'].create({\n 'name': \"Delivered Service\",\n 'standard_price': 30,\n 'list_price': 90,\n 'type': 'service',\n 'invoice_policy': 'delivery',\n 'uom_id': self.env.ref('product.product_uom_hour').id,\n 'uom_po_id': self.env.ref('product.product_uom_hour').id,\n 'default_code': 'SERV-DELI',\n 'service_type': 'timesheet',\n 'service_tracking': 'task_global_project',\n 'project_id': project.id\n })\n\n self.env['sale.order.line'].create({\n 'name': product_service_task.name,\n 'product_id': product_service_task.id,\n 'product_uom_qty': 10,\n 'product_uom': 
product_service_task.uom_id.id,\n 'price_unit': product_service_task.list_price,\n 'order_id': sale_order.id,\n })\n\n self.assertEqual(sale_order.tasks_count, 2, \"Adding a new service line on a confirmer SO should create a new task.\")", "def test_create__params__future_recurring_ride(self, service: fixture) -> None:\n ride = RecurringRideFactory.create(service=service, future_recurring_ride=True)\n expected_pickup: str = ride['ride']['pickup']['timestamp']\n\n assert date.isoformat(date.today() + timedelta(days=1)) in expected_pickup", "def create(self):\n \n # create the sequence structure by calling the self.project.create\n self.project.create()", "def create_BOM_row(siteID,code,description,quantity,discount,unit_list,contract_term=1):\n global bom\n bom_row = {\n \"Site ID\":siteID,\n \"Code\":code,\n \"Description\": description,\n \"Quantity\": quantity * contract_term,\n \"Discount\": discount,\n \"Unit list\": unit_list,\n \"Unit net\": unit_list * (1 - discount)\n }\n bom_row[\"Total Due\"] = bom_row[\"Unit net\"] * quantity * contract_term\n return bom_row" ]
[ "0.69727474", "0.64852816", "0.6456974", "0.64502263", "0.6428501", "0.64010954", "0.63116646", "0.6295384", "0.6290846", "0.6239638", "0.6185124", "0.61704993", "0.6166009", "0.6165586", "0.6118279", "0.6118134", "0.5988207", "0.5985031", "0.594222", "0.5859622", "0.5817861", "0.58099025", "0.5807005", "0.57998943", "0.57708055", "0.57141364", "0.56990033", "0.569679", "0.5681464", "0.5671886", "0.56082046", "0.5600326", "0.5598877", "0.557701", "0.5569751", "0.5563605", "0.5547781", "0.553864", "0.5526813", "0.55251557", "0.55045795", "0.5481555", "0.54755735", "0.54337656", "0.5423832", "0.5421023", "0.5414769", "0.5413985", "0.54128104", "0.54127896", "0.54002", "0.5379018", "0.53781676", "0.53716063", "0.5362372", "0.5351404", "0.53474104", "0.53351307", "0.53265274", "0.5324349", "0.53242326", "0.53056496", "0.5275842", "0.52748454", "0.52604645", "0.52599156", "0.5254175", "0.52395093", "0.52144665", "0.5208813", "0.5185231", "0.5170688", "0.51210314", "0.5118729", "0.5103598", "0.5093981", "0.5093103", "0.5092513", "0.5091681", "0.5090888", "0.50900924", "0.5089719", "0.50868684", "0.5080658", "0.5075705", "0.5067259", "0.5055574", "0.50554097", "0.5055372", "0.5051232", "0.5048848", "0.5046626", "0.5033628", "0.502817", "0.50216645", "0.5012086", "0.5007557", "0.5005719", "0.50004506", "0.49993855" ]
0.7042408
0
Formats a table from a nested list, where the first index is the row.
def list_to_table(lst, titles, margins=3, sort=True): if sort: lst = sorted(lst) result = '' margins = [margins,] * len(titles) if not hasattr(margins, '__iter__') else margins # establish column widths widths = [] for i in range(len(titles)): widths.append(max([len(titles[i]),] + [len(row[i]) for row in lst]) + margins[i]) # a base format string for every line linebase = '' for w in widths: linebase += ('%%-%ss'%w) # make the header result += linebase % tuple(titles) + '\n' result += '-' * sum(widths) + '\n' # add the table data for row in lst: result += linebase % tuple(row) + '\n' return result.strip()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def format_table(table, format_str):\n ret = []\n for t in table:\n if type(t) is list:\n ret.append(format_table(t, format_str))\n else:\n ret.append(format_str.format(t))\n return ret", "def simple_format_table(table):\n s = [[str(e) for e in row] for row in table]\n lens = [max(map(len, col)) for col in zip(*s)]\n fmt = '\\t'.join('{{:{}}}'.format(x) for x in lens)\n table = [fmt.format(*row) for row in s]\n return '\\n'.join(table)", "def format_prettytable(table):\r\n for i, row in enumerate(table.rows):\r\n for j, item in enumerate(row):\r\n table.rows[i][j] = format_output(item)\r\n ptable = table.prettytable()\r\n ptable.hrules = FRAME\r\n ptable.horizontal_char = '.'\r\n ptable.vertical_char = ':'\r\n ptable.junction_char = ':'\r\n return ptable", "def tabulate(items: typing.List[str]):\n rows, columns = find_shape(len(items))\n extra = (rows * columns) - len(items)\n items += [' '] * extra\n items = [\n [f'{items[i][0]}-{items[i + columns - 1][0]}', *items[i:i + columns]]\n for i in range(0, len(items), columns)\n ]\n items = [[column[i] for column in items] for i in range(columns + 1)]\n items = ['| ' + ' | '.join(row) + ' |' for row in items]\n items.insert(1, ('| --- ' * rows) + '|')\n return '\\n'.join(items)", "def generate_table(rows):\n\n # - figure out column widths\n widths = [len(max(columns, key=len)) for columns in zip(*rows)]\n\n # - print the header\n header, data = rows[0], rows[1:]\n yield (\n ' | '.join(format(title, \"%ds\" % width) for width, title in zip(widths, header))\n )\n\n # Print the separator\n first_col = ''\n # - print the data\n for row in data:\n if first_col == '' and row[0] != '':\n # - print the separator\n yield '-+-'.join('-' * width for width in widths)\n first_col = row[0]\n\n yield (\n \" | \".join(format(cdata, \"%ds\" % width) for width, cdata in zip(widths, row))\n )", "def pretty_print_table(data, list_of_dicts):\n # ensure that each dict has the same set of keys\n keys = None\n for d in list_of_dicts:\n if keys is None:\n keys = d.keys()\n else:\n if d.keys() != keys:\n print(\"Error! 
not all dicts have the same keys!\")\n return\n header = \"\\t\" + \"\\t\".join(['{:11.10s}'] * len(data))\n header = header.format(*data)\n rows = []\n for k in keys:\n r = k + \"\\t\"\n for d in list_of_dicts:\n if type(d[k]) is float:\n r += '{:.9f}'.format(d[k]) + \"\\t\"\n else:\n r += '{:10.9s}'.format(str(d[k])) + \"\\t\"\n rows.append(r)\n print(header)\n for row in rows:\n print(row)", "def pretty_print_table(result, heading=False):\n # If the data is not in string, then it is likely a text format\n if type(result) == 'str':\n result = result.split('\\n')\n result = [line.split() for line in result]\n #Remove empty items\n result = [row for row in result if row!=['']]\n\n columns = len(result[0]) #Get the number of columns, this is used for row formatting\n row_format = '' #variable to construct the row formatting\n \n # Calculating the max length for each column\n for i in range(0, columns):\n # picking the length of the longest element\n #Need to convert the elements into string\n MAX_LEN = len(max([str(row[i]) for row in result], key=len))\n # Constructing the string formatting\n row_format += \"{:<\" + str(MAX_LEN) + \"} | \"\n\n pretty_result = ''\n if heading:\n pretty_result = row_format.format(*result[0]) + '\\n'\n pretty_result += len(row_format.format(*result[0])) * \"-\" + '\\n'\n result = result[1:]\n for row in result:\n pretty_result += row_format.format(*row) + '\\n'\n return pretty_result", "def print_table(self, items, fields):\r\n formats = []\r\n borders = []\r\n for f in fields:\r\n length = max(len(f),\r\n max([len(self.string(getattr(i, f))) for i in items]))\r\n justify = '>' if isinstance(getattr(\r\n items[0], f), int) or f == 'size' or f == 'reward' else '<'\r\n formats.append('{:' + justify + self.string(length + 2) + '}')\r\n borders.append('-' * length + ' ')\r\n row_format = u''.join(formats)\r\n headers = [f + ' ' for f in fields]\r\n print(row_format.format(*headers))\r\n print(row_format.format(*borders))\r\n for i in items:\r\n i_fields = [self.string(getattr(i, f)) + ' ' for f in fields]\r\n try:\r\n print(row_format.format(*i_fields))\r\n except UnicodeEncodeError:\r\n print(row_format.format(*i_fields).encode('utf-8'))", "def construct_table(self):\n table_str = self.header_row\n row_lbls, col_lbls = self.get_idxvals()\n for r,rlbl in enumerate(row_lbls):\n row_data = [self.data[rlbl,clbl] for clbl in col_lbls]\n table_str += self.construct_row(r, row_data)\n \n return table_str", "def print_list(objs, fields, formatters=None, sortby_index=0,\n mixed_case_fields=None, field_labels=None,\n normalize_field_names=False,\n table_label=None, print_header=True, print_border=True,\n print_row_border=False,\n out=sys.stdout):\n formatters = formatters or {}\n mixed_case_fields = mixed_case_fields or []\n field_labels = field_labels or fields\n if len(field_labels) != len(fields):\n raise ValueError(\"Field labels list %(labels)s has different number of\"\n \" elements than fields list %(fields)s\"\n % {\"labels\": field_labels, \"fields\": fields})\n\n kwargs = {}\n if sortby_index is not None:\n kwargs = {\"sortby\": field_labels[sortby_index]}\n\n if print_border and print_row_border:\n headers_horizontal_char = \"=\"\n kwargs[\"hrules\"] = prettytable.ALL\n else:\n headers_horizontal_char = \"-\"\n pt = prettytable.PrettyTable(field_labels)\n pt.align = \"l\"\n\n for o in objs:\n row = []\n for field in fields:\n if field in formatters:\n row.append(formatters[field](o))\n else:\n field_name = field\n\n if normalize_field_names:\n if field_name not 
in mixed_case_fields:\n field_name = field_name.lower()\n field_name = field_name.replace(\" \", \"_\").replace(\"-\", \"_\")\n\n if isinstance(o, dict):\n data = o.get(field_name, \"\")\n else:\n data = getattr(o, field_name, \"\")\n row.append(data)\n pt.add_row(row)\n\n if not print_border or not print_header:\n pt.set_style(prettytable.PLAIN_COLUMNS)\n pt.left_padding_width = 0\n pt.right_padding_width = 1\n\n table_body = pt.get_string(header=print_header,\n border=print_border,\n **kwargs) + \"\\n\"\n if print_border and print_row_border:\n table_body = table_body.split(\"\\n\", 3)\n table_body[2] = table_body[2].replace(\"-\", headers_horizontal_char)\n table_body = \"\\n\".join(table_body)\n\n table_header = \"\"\n\n if table_label:\n table_width = table_body.index(\"\\n\")\n table_header = make_table_header(\n table_label, table_width, horizontal_char=headers_horizontal_char)\n table_header += \"\\n\"\n\n if table_header:\n out.write(encodeutils.safe_encode(table_header).decode())\n out.write(encodeutils.safe_encode(table_body).decode())", "def table_format(row, header = False, width = 10):\n result = \"|\" + \"|\".join(str(entry).center(width) for entry in row) + \"|\"\n if header:\n l = len(result)\n result = result + \"\\n\" + \"|\" + (l-1) * \"-\"\n return result", "def prettyTable(self, heads, rows): \n # First calculate the maximum lengths for each column.\n lengths = map(len, heads)\n for row in rows:\n lengths = map(max, lengths, map(len, row))\n\n # Create a format string for the maximum lengths.\n formatString = (\"|{{:^{}}}\" * len(heads) + \"|\").format(*lengths)\n\n # Print the heads, then the contents.\n headLine = formatString.format(*heads)\n border = \"-\" * len(headLine)\n print(border)\n print(headLine)\n print(border)\n\n # Remake the format string right-justified.\n formatString = (\"|{{:>{}}}\" * len(heads) + \"|\").format(*lengths)\n for row in rows:\n print(formatString.format(*row))\n print(border)", "def tab_delim_table(self):\n self.generate()\n\n header = ' \\t '.join([r'{: ^7}'.format(col) for col in self.columns])\n lines = []\n for row in self.rows:\n bits = []\n for col in self.columns:\n if col in self.formatters:\n bits.append(self.formatters[col].format(row[col]))\n else:\n bits.append(self.formatters.get(col, '{: ^7}').format(row[col] if row[col] else ''))\n lines.append(' \\t '.join(bits))\n\n return \"{}\\n{}\".format(header, '\\n'.join(lines))", "def formatSimpleTable(data, stringify=True):\n\tif stringify:\n\t\tdata = [[str(v) for v in row] for row in data]\n\n\tif not data:\n\t\treturn \"\"\n\n\tcolWidthes = [max(len(row[colInd]) for row in data)\n\t\tfor colInd in range(len(data[0]))]\n\tfmtStr = \" \".join(\"%%%ds\"%w for w in colWidthes)\n\ttable = \"\\n\".join(fmtStr%tuple(row) for row in data)\n\treturn table", "def experiment_list_table_format(result):\n table = []\n for item in result:\n table.append(experiment_show_table_format(item))\n return table", "def print_table(table):\n rest = table[1:]\n fmt = \"%-28s %-9s %-16s %s\"\n for row in rest:\n print(fmt % tuple(row))", "def table(name, components):\n table = PrettyTable([name])\n table.align[name] = 'l'\n [table.add_row([component['name'][0:-5]]) for component in components]\n return table", "def print_table(rows, header=['Operation', 'OPS']):\n if len(rows) == 0:\n return\n col_max = [max([len(str(val[i])) for val in rows]) + 3 for i in range(len(rows[0]))]\n row_format = ''.join([\"{:<\" + str(length) + \"}\" for length in col_max])\n\n if len(header) > 0:\n 
print(row_format.format(*header))\n print(row_format.format(*['-' * (val - 2) for val in col_max]))\n\n for row in rows:\n print(row_format.format(*row))\n print(row_format.format(*['-' * (val - 3) for val in col_max]))", "def table(self, doc, level, output):\n output('<table border=\"1\" cellpadding=\"2\">\\n')\n for row in doc.getRows()[0]:\n output(\"<tr>\\n\")\n for column in row.getColumns()[0]:\n str = ('<%s colspan=\"%s\" align=\"%s\" valign=\"%s\">'\n % (column.getType(),\n column.getSpan(),\n column.getAlign(),\n column.getValign()))\n output(str)\n for c in column.getChildNodes():\n getattr(self, self.element_types[c.getNodeName()]\n )(c, level, output)\n output(\"</\" + column.getType() + \">\\n\")\n output(\"</tr>\\n\")\n output(\"</table>\\n\")", "def print_table(table, title_list):\n table.insert(0, title_list)\n for row_index, row in enumerate(table):\n for col_index, col in enumerate(row):\n if (type(col) == float) or (type(col) == int):\n table[row_index][col_index] = str(\"{0:,.2f}\".format(col))\n widths = [max(map(len, col)) for col in zip(*table)]\n sum_of_widths = sum(widths) + len(table[0]) * 3 - 1\n for row in table:\n print(\"-\" * sum_of_widths)\n print(\"|\" + \" \".join((val.ljust(width) + \"|\" for val, width in zip(row, widths))))\n print(\"-\" * sum_of_widths)", "def print_table(table, title_list):\n table.insert(0, title_list)\n for row_index, row in enumerate(table):\n for col_index, col in enumerate(row):\n if (type(col) == float) or (type(col) == int):\n table[row_index][col_index] = str(\"{0:,.2f}\".format(col))\n widths = [max(map(len, col)) for col in zip(*table)]\n sum_of_widths = sum(widths) + len(table[0]) * 3 - 1\n for row in table:\n print(\"-\" * sum_of_widths)\n print(\"|\" + \" \".join((val.ljust(width) + \"|\" for val, width in zip(row, widths))))\n print(\"-\" * sum_of_widths)", "def _format_table(fmt, headers, rows, colwidths, colaligns, is_multiline, rowaligns):\n lines = []\n hidden = fmt.with_header_hide if (headers and fmt.with_header_hide) else []\n pad = fmt.padding\n headerrow = fmt.headerrow\n\n padded_widths = [(w + 2 * pad) for w in colwidths]\n if is_multiline:\n pad_row = lambda row, _: row # noqa do it later, in _append_multiline_row\n append_row = partial(_append_multiline_row, pad=pad)\n else:\n pad_row = _pad_row\n append_row = _append_basic_row\n\n padded_headers = pad_row(headers, pad)\n padded_rows = [pad_row(row, pad) for row in rows]\n\n if fmt.lineabove and \"lineabove\" not in hidden:\n _append_line(lines, padded_widths, colaligns, fmt.lineabove)\n\n if padded_headers:\n append_row(lines, padded_headers, padded_widths, colaligns, headerrow)\n if fmt.linebelowheader and \"linebelowheader\" not in hidden:\n _append_line(lines, padded_widths, colaligns, fmt.linebelowheader)\n\n if padded_rows and fmt.linebetweenrows and \"linebetweenrows\" not in hidden:\n # initial rows with a line below\n for row, ralign in zip(padded_rows[:-1], rowaligns):\n append_row(\n lines, row, padded_widths, colaligns, fmt.datarow, rowalign=ralign\n )\n _append_line(lines, padded_widths, colaligns, fmt.linebetweenrows)\n # the last row without a line below\n append_row(\n lines,\n padded_rows[-1],\n padded_widths,\n colaligns,\n fmt.datarow,\n rowalign=rowaligns[-1],\n )\n else:\n separating_line = (\n fmt.linebetweenrows\n or fmt.linebelowheader\n or fmt.linebelow\n or fmt.lineabove\n or Line(\"\", \"\", \"\", \"\")\n )\n for row in padded_rows:\n # test to see if either the 1st column or the 2nd column (account for showindex) has\n # the 
SEPARATING_LINE flag\n if _is_separating_line(row):\n _append_line(lines, padded_widths, colaligns, separating_line)\n else:\n append_row(lines, row, padded_widths, colaligns, fmt.datarow)\n\n if fmt.linebelow and \"linebelow\" not in hidden:\n _append_line(lines, padded_widths, colaligns, fmt.linebelow)\n\n if headers or rows:\n output = \"\\n\".join(lines)\n if fmt.lineabove == _html_begin_table_without_header:\n return JupyterHTMLStr(output)\n else:\n return output\n else: # a completely empty table\n return \"\"", "def tabulate(table):\n cw = {} # column widths\n\n # Trim leading and trailing whitespace from each element.\n for i, row in enumerate(table):\n for j, element in enumerate(row):\n table[i][j] = element.strip()\n\n # Find the max element width for each column.\n for row in table:\n for j, element in enumerate(row):\n cw[j] = max(cw.get(j, 0), len(element))\n\n # Reformat elements to align columns.\n for i, row in enumerate(table):\n for j, element in enumerate(row):\n table[i][j] = ' ' + element.ljust(cw[j]) + ' '", "def tabulate(\n headers: List[str],\n rows: List[Dict[str, str]],\n header_labels: Optional[Dict[str, str]] = None,\n) -> str:\n col_widths = {}\n\n def label(name) -> str:\n label = (header_labels or {}).get(name, \"\")\n if label:\n return label\n return str(name.upper())\n\n def field(obj, name) -> str:\n return str(obj.get(name, \"\"))\n\n for name in headers:\n col_widths[name] = len(label(name))\n for row in rows:\n for name in headers:\n col_widths[name] = max(len(field(row, name)), col_widths[name])\n\n format_string = \"\"\n for col_width in col_widths.values():\n if format_string:\n format_string += \" \"\n format_string += \"{:<%d}\" % col_width\n\n output = format_string.format(*[label(name) for name in headers])\n for row in rows:\n output += \"\\n\"\n output += format_string.format(*[field(row, name) for name in headers])\n return output", "def build_markdown_table(header, metadata, sorted_keys, row_format):\n table_md = _print_row(header)\n table_md += _print_row(['-' * len(h) for h in header])\n\n for name in sorted_keys:\n d = metadata[name]\n\n # single-argument callable that returns a string is used for conditional formats\n # e.g. 
to only print link if provided, define\n # lambda d: '[Link]({_link})' if d[_link] else ''\n row = [r(d).format(**d) if callable(r) else r.format(**d) for r in row_format]\n table_md += _print_row(row)\n\n return table_md", "def table_format(row, header = False, width = 20):\n result = \"|\" + \"|\".join(str(entry).center(width) for entry in row) + \"|\"\n if header:\n result = result + \"\\n\" + \"|\" + \"|\".join([width * \"=\" for _ in row]) + \"|\"\n return result", "def display_table(dict_list=None, user_config_data=None):\r\n if user_config_data is not None:\r\n # print(tabulate.tabulate(user_config_data, headers=['Variable', 'Value'], tablefmt=\"grid\"))\r\n print(tabulate.tabulate(user_config_data, tablefmt=\"grid\"))\r\n return\r\n\r\n header = [\"idx\"] + list(dict_list[0].keys())\r\n rows = [[idx + 1] + list(x.values()) for idx, x in enumerate(dict_list)]\r\n print(tabulate.tabulate(rows, header, tablefmt=\"grid\"))", "def display_table(dict_list=None, user_config_data=None):\r\n if user_config_data is not None:\r\n # print(tabulate.tabulate(user_config_data, headers=['Variable', 'Value'], tablefmt=\"grid\"))\r\n print(tabulate.tabulate(user_config_data, tablefmt=\"grid\"))\r\n return\r\n\r\n header = [\"idx\"] + list(dict_list[0].keys())\r\n rows = [[idx + 1] + list(x.values()) for idx, x in enumerate(dict_list)]\r\n print(tabulate.tabulate(rows, header, tablefmt=\"grid\"))", "def generate_table(self, rows):\n ...", "def print_table(listx):\r\n\tfor lists in listx:\r\n\t\tfor i in lists:\r\n\t\t\tprint str(i) , '\\t',\r\n\t\tprint()", "def printAsTextTable(self, format_type, text, template=False):\n\n # the order is defined by header list\n col_paddings = []\n message = \"\"\n\n if format_type == \"text\":\n col = rcol = lcol = ecol = tbcol = tecol = bcol = tcol = \"|\"\n row = \"+\"\n space = \"\"\n for name in self.table_header:\n pad = self.getWidth(text[name] + [name, ])\n col_paddings.append(pad)\n for i in range(pad):\n row = \"%s-\" % (row)\n row = \"%s-+\" % (row)\n ecol = \"%s\\n%s\" % (ecol, row)\n tecol = \"%s\\n%s\" % (tecol, row)\n message = \"%s\\n\" % (row,)\n else:\n for name in self.table_header:\n col_paddings.append(0)\n if format_type == \"csv\":\n col = \",\"\n bcol = ecol = tecol = tbcol = \"\"\n tcol = rcol = lcol = \",\"\n row = \"\"\n space = \"\"\n if format_type == \"html\":\n col = \"</td>\\n<td align=center>\"\n tbcol = \"<tr><th align=center>\"\n tecol = \"</th></tr>\"\n tcol = \"</th><th align=center>\"\n rcol = \"</td>\\n<td align=right>\"\n lcol = \"</td>\\n<td align=left>\"\n bcol = \"<tr><td align=left>\"\n ecol = \"</td></tr>\"\n space = \"&nbsp;\"\n\n if not template and format_type != \"html\":\n line = \"\"\n for i in range(len(self.table_header)):\n pad = col_paddings[i]\n column = self.table_header[i].center(pad + 1)\n if i == 0:\n line = column\n else:\n line = \"%s%s%s\" % (line, tcol, column)\n message = \"%s%s%s%s\\n\" % (message, tbcol, line, tecol)\n\n for count in range(0, self.getLength(text)):\n index = 0\n line = bcol\n for key in self.table_header:\n item = text[key][count]\n separator = lcol\n if format_type != \"csv\" and (\n type(item) == type(0) or type(item) == type(0.0)):\n separator = rcol\n nv = NiceNum.niceNum(item, 1)\n value = nv.rjust(col_paddings[index] + 1)\n else:\n if type(item) == type(0) or type(item) == type(0.0):\n value = repr(item).rjust(col_paddings[index] + 1)\n else:\n value = item.ljust(col_paddings[index] + 1)\n if format_type == \"html\" and len(item.strip()) == 0:\n value = space\n if line == 
bcol:\n line = \"%s%s\" % (line, value)\n else:\n line = \"%s%s%s\" % (line, separator, value)\n index += 1\n line = \"%s%s\" % (line, ecol)\n message = \"%s%s\\n\" % (message, line)\n\n return message", "def test_tabular_format_output_wrapper():\n data = [[\"1\", None], [\"2\", \"Sam\"], [\"3\", \"Joe\"]]\n headers = [\"id\", \"name\"]\n expected = dedent(\n \"\"\"\\\n +----+------+\n | id | name |\n +----+------+\n | 1 | N/A |\n | 2 | Sam |\n | 3 | Joe |\n +----+------+\"\"\"\n )\n\n assert expected == \"\\n\".join(\n format_output(iter(data), headers, format_name=\"ascii\", missing_value=\"N/A\")\n )", "def workspace_list_table_format(result):\n table = []\n for item in result:\n table.append(workspace_show_table_format(item))\n return table", "def tabular_table(word_list=None, field_width=26, line_length=78, output_separator=\" \", truncate_elements=True):\n if not word_list:\n word_list = list()\n elements = [ANSIString(entry) for entry in word_list]\n if truncate_elements:\n elements = [entry[:field_width] for entry in elements]\n elements = [entry.ljust(field_width) for entry in elements]\n separator_length = len(output_separator)\n per_line = line_length / (field_width + separator_length)\n result_string = ANSIString(\"\")\n count = 0\n total = len(elements)\n for num, element in enumerate(elements):\n count += 1\n if count == 1:\n result_string += element\n elif count == per_line:\n result_string += output_separator\n result_string += element\n if not num+1 == total:\n result_string += '\\n'\n count = 0\n elif count > 1:\n result_string += output_separator\n result_string += element\n return result_string", "def prettytable(self):\r\n table = PrettyTable(self.columns)\r\n if self.sortby:\r\n table.sortby = self.sortby\r\n for a_col, alignment in self.align.items():\r\n table.align[a_col] = alignment\r\n\r\n # Adding rows\r\n for row in self.rows:\r\n table.add_row(row)\r\n return table", "def generate_table(results):\n keyslist = list(results[0].keys())\n table = PrettyTable(keyslist)\n for dct in results:\n table.add_row([dct.get(c, \"\") for c in keyslist])\n return table", "def print_table(table):\r\n print('/-----------------------------------------------------------------------------------\\\\')\r\n for item in table:\r\n\r\n while len(item[1]) <= 22:\r\n item[1] += ' '\r\n\r\n while len(item[2]) <= 27:\r\n item[2] += ' '\r\n\r\n while len(item[0]) <= 15:\r\n item[0] += ' '\r\n\r\n print('| '+item[0]+' | '+item[1]+'| '+item[2]+' |')\r\n\r\n print('\\\\-----------------------------------------------------------------------------------/')", "def print_table(table):\n for row in table:\n # Header column left justified\n print(\"{:<19}\".format(row[0]), end='')\n # Remaining columns right justified\n for col in row[1:]:\n print(\"{:>4}\".format(col), end='')\n print(\"\", end='\\n')", "def make_table(header, align_map=None, rows=None):\n t = PrettyTable()\n t.horizontal_char = t.vertical_char = t.junction_char = ' '\n t.field_names = header\n if align_map:\n for field, align in zip(header, align_map):\n t.align[field] = align\n if rows:\n for row in rows:\n if len(row) < len(t.field_names):\n continue\n try:\n t.add_row(row)\n except Exception as err:\n print_('fields:', t.field_names)\n print_('row:', row)\n print_('rows:', rows)\n raise err\n return t", "def write_table(*lists):\n print(\"<table>\")\n for columns in zip(*lists):\n print(\"<tr>\")\n for val in columns:\n print(\"<td>{}</td>\".format(val))\n print(\"</tr>\")\n print(\"</table>\")", "def print_table(table):\n for 
i in range(len(table)):\n print \"Row \", i, \"\\t\",\n for j in range(len(table[i])):\n print table[i][j],\n print \"\\n\"", "def make_table(self, content):\n html = '<table class=\"table table-condensed\">'\n\n # Check for list or tuple\n if type(content) is list or type(content) is tuple:\n if len(content) > 0:\n # If first item in list is dictionary continue\n if type(content[0]) is dict:\n # Make table header for every key\n html += '<thead><tr>'\n for key in content[0].keys():\n html += '<th>' + key + '</th>'\n html += '</tr></thead>'\n\n # Make table body\n html += '<tbody>'\n for dictonary in content:\n # New table row for every dict item in list\n html += '<tr>'\n # New column for every value in dictionary\n for value in dictonary.values():\n html += '<td>' + str(value) + '</td>'\n html += '</tr>'\n html += '</tbody>'\n else:\n html += 'No content available'\n\n html += '</table>'\n\n self.table = html\n\n return html", "def generate_table(columns, rows, plain=False, sort=None, reversesort=False):\n tbl = PrettyTable(columns)\n tbl.set_style(PLAIN_COLUMNS if plain else DEFAULT)\n tbl.header = not plain\n [tbl.add_row(x) for x in rows]\n tbl.align = 'l'\n\n if sort:\n tbl.sortby = sort\n\n tbl.reversesort = reversesort\n\n return tbl", "def get_table(header, floatPercission=4, *rows):\n\n table = PrettyTable(header)\n table.padding_width = 1\n for row in rows:\n # go through row and round floats\n for i in xrange(len(row)):\n if type(row[i]) is float:\n row[i] = round(row[i], floatPercission)\n table.add_row(row)\n return table", "def job_list_table_format(result):\n table = []\n for item in result:\n table.append(job_show_table_format(item))\n return table", "def write(self, stream, root, order):\n stream.write('[table]\\n')\n if root and isinstance(root[0], dict):\n self.markup(stream, order, '[tr][th]', '[/th][th]', '[/th][/tr]')\n for row in root:\n self.markup(stream, [row.get(col, '') for col in order], '[tr][td]', '[/td][td]', '[/td][/tr]')\n else:\n for row in root:\n self.markup(stream, row, '[tr][td]', '[/td][td]', '[/td][/tr]')\n stream.write('[/table]\\n')", "def tabular_formatted_printing(data_list):\n n = len(data_list)\n max = 0\n for i in range(0,n):\n if int(len(data_list[i][0])) > max:\n max = len(data_list[i][0])\n for i in range(0,n):\n if int(len(data_list[i][0])) < max:\n space = max - len(data_list[i][0])\n else:\n space = 0\n print(data_list[i][0]+space*' '+' : '+str(data_list[i][1]))\n return", "def _pretty_table_line(self, items):\n padded_strings = []\n for i, s in enumerate(items):\n padding_value = self._padding_values[i]\n padded_strings.append('{:<{}s}'.format(str(s), padding_value))\n return \" \" + \"| \".join(padded_strings)", "def print_table(data_array, column_tag, row_tag, \n print_format = \"1.2f\", \n with_color_cell = True,\n colormap='Greys', colorscale=0.5, colorwrap=0, col_sep='', \n print_latex_table=True, print_text_table=True,\n print_format_along_row=True):\n if column_tag is None:\n column_tag = [\"\" for data in data_array[0, :]]\n if row_tag is None:\n row_tag = [\"\" for data in data_array]\n\n # check print_format\n if type(print_format) is not list:\n if print_format_along_row:\n # repeat the tag\n print_format = [print_format for x in row_tag]\n else:\n print_format = [print_format for x in column_tag]\n else:\n if print_format_along_row:\n assert len(print_format) == len(row_tag)\n else:\n assert len(print_format) == len(column_tag)\n\n\n # color configuration\n color_func = cm.get_cmap(colormap)\n data_idx = 
return_valid_number_idx(data_array)\n value_min = np.min(data_array[data_idx])\n value_max = np.max(data_array[data_idx])\n \n def get_latex_color(x):\n # return a color command for latex cell\n return return_latex_color_cell(x, value_min, value_max, \n colorscale, colorwrap, color_func)\n \n # maximum width for tags in 1st column\n row_tag_max_len = max([len(x) for x in row_tag])\n\n # maximum width for data and tags for other columns\n if print_format_along_row:\n tmp_len = []\n for idx, data_row in enumerate(data_array):\n tmp_len.append(\n max([len(\"{num:{form}}\".format(num=x, form=print_format[idx])) \\\n for x in data_row]))\n else:\n tmp_len = []\n for idx, data_col in enumerate(data_array.T):\n tmp_len.append(\n max([len(\"{num:{form}}\".format(num=x, form=print_format[idx])) \\\n for x in data_col]))\n col_tag_max_len = max([len(x) for x in column_tag] + tmp_len)\n \n # prepare buffer\n text_buffer = \"\"\n latex_buffer = \"\"\n \n # latex head\n latex_buffer += r\"\\begin{tabular}{\" \\\n + ''.join(['c' for x in column_tag + ['']]) + r\"}\" + \"\\n\"\n \n # head row\n # for latex\n hrow = [fill_cell(\"\", row_tag_max_len)] \\\n + [fill_cell(x, col_tag_max_len) for x in column_tag]\n latex_buffer += return_one_row_latex(hrow)\n # for plain text (add additional separator for each column)\n hrow = [fill_cell(\"\", row_tag_max_len, col_sep)] \\\n + [fill_cell(x, col_tag_max_len, col_sep) for x in column_tag]\n text_buffer += return_one_row_text(hrow)\n \n # contents\n row = data_array.shape[0]\n col = data_array.shape[1]\n for row_idx in np.arange(row):\n # row head\n row_content_latex = [fill_cell(row_tag[row_idx], row_tag_max_len)]\n row_content_text = [fill_cell(row_tag[row_idx],row_tag_max_len,col_sep)]\n \n # each column in the raw\n for col_idx in np.arange(col):\n\n if print_format_along_row:\n tmp_print_format = print_format[row_idx]\n else:\n tmp_print_format = print_format[col_idx]\n\n if is_valid_float(data_array[row_idx,col_idx]):\n num_str = \"{num:{form}}\".format(num=data_array[row_idx,col_idx],\n form=tmp_print_format)\n latex_color_cell = get_latex_color(data_array[row_idx,col_idx])\n elif type(data_array[row_idx,col_idx]) is str:\n num_str = \"{num:{form}}\".format(num=data_array[row_idx,col_idx],\n form=tmp_print_format)\n latex_color_cell = ''\n else:\n num_str = ''\n latex_color_cell = ''\n \n if not with_color_cell:\n latex_color_cell = ''\n \n row_content_text.append(\n fill_cell(num_str, col_tag_max_len, col_sep))\n\n row_content_latex.append(\n fill_cell(latex_color_cell + ' ' + num_str, col_tag_max_len))\n \n # latex table content\n latex_buffer += return_one_row_latex(row_content_latex)\n # text content\n text_buffer += return_one_row_text(row_content_text)\n \n latex_buffer += r\"\\end{tabular}\" + \"\\n\"\n\n if print_latex_table:\n print(latex_buffer)\n if print_text_table:\n print(text_buffer)\n return", "def get_formatted_table(images):\n header = ['CEE (Windows)']\n table = columned_table(header, [images])\n return table", "def print_table(table):\n # transpose the table:\n table = map(list, zip(*table))\n # get the column width:\n col_width = [max(len(str(x)) for x in col) for col in zip(*table)]\n # print it to screen:\n print\n for line in table:\n print \"| \" + \" | \".join(\"{:{}}\".format(x, col_width[i]) for i, x in enumerate(line)) + \" |\"\n print", "def myformat(table):\n m = 0\n table = sorted(table, key=itemgetter(0))\n for t in table:\n t = str(t)\n if len(t[0]) > m:\n m = len(t[0])\n m += 10\n fstr = \"{0:}\" + m*\" \" + \"{1:}\"\n s 
= \"\"\n for x in table:\n try:\n a = float(x[0])\n b = float(x[1])\n s += \"{0:.5f}{1:{width}}\".format(a, b, width=m) + \"\\n\"\n except IndexError:\n pass\n return s\n \"\"\"\n out = \"\"\n for pair in table:\n out += str(pair[0]) + 5*\" \" + str(pair[1]) + \"\\n\"\n return out\"\"\"", "def pprint(table, truncate=40, padding=\" \", fill=\".\"):\n # Calculate the width of each column, based on the longest field in each column.\n # Long fields can be split across different lines, so we need to check each line.\n w = [0 for column in table.columns]\n R = []\n for i, row in enumerate(table.rows):\n fields = []\n for j, v in enumerate(row):\n # Cast each field in the row to a string.\n # Strings that span beyond the maximum column width are wrapped.\n # Thus, each \"field\" in the row is a list of lines.\n head, tail = _truncate(decode_utf8(v), truncate)\n lines = []\n lines.append(head)\n w[j] = max(w[j], len(head))\n while len(tail) > 0:\n head, tail = _truncate(tail, truncate)\n lines.append(head)\n w[j] = max(w[j], len(head))\n fields.append(lines)\n R.append(fields)\n for i, fields in enumerate(R):\n # Add empty lines to each field so they are of equal height.\n n = max([len(lines) for lines in fields])\n fields = [lines+[\"\"] * (n-len(lines)) for lines in fields]\n # Print the row line per line, justifying the fields with spaces.\n for k in range(n):\n for j, lines in enumerate(fields):\n s = lines[k]\n s += ((k==0 or len(lines[k]) > 0) and fill or \" \") * (w[j] - len(lines[k])) \n s += padding\n print s,\n print", "def print_table(self):\n print(\"%-12s%-12s%-12s%-12s%-12s\" % (\"index\",\"balance\",\"payment\",\"interest\",\"amortization\"))\n print(\"-------------------------------------------------------------\")\n for i in self.table[\"index\"]:\n print(\"%-12i%-12i%-12i%-12i%-12i\" % (self.table[\"index\"][i],self.table[\"balance\"][i]\\\n ,self.table[\"payment\"][i],self.table[\"interest\"][i],\\\n self.table[\"amortization\"][i]))", "def format(self, table):\n #return table.data.to_json()\n m = table.as_array()\n rank = len(m.shape)\n is_table = len(table.headers)<=5 or (len(table.headers)>5 and (table.headers[0] != '0' or table.headers[1] != '1' or table.headers[2] != '2' ))\n\n if rank<3 and is_table:\n v = []\n for i in range(len(table.headers)):\n vv = {\n 'offset': table.offset,\n 'header': table.headers[i],\n 'type': table.types[i],\n 'data': _replace_nans(m[:,i].tolist()) if rank>1 else _replace_nans(m.tolist()),\n }\n if table.sizes is not None:\n vv[\"size\"] = table.sizes[0]\n v.append(vv)\n else:\n # if hasattr(data, \"strip\") or \\\n # (not hasattr(data, \"__getitem__\") and \\\n # not hasattr(data, \"__iter__\")):\n # # data is not a list/tuple => wrap it\n # data = [ data ]\n v = {\n 'offset': table.offset,\n #'headers': table.headers,\n 'type': table.types[0],\n 'data': _replace_nans(m.tolist()),\n }\n if table.sizes is not None:\n v[\"size\"] = table.sizes\n\n return json.dumps(v, cls=ExtEncoder)", "def _gen_table_rows(self):\n row_labels = self._get_padded_row_labels()\n column_labels = self._get_padded_column_labels()\n for row in zip(*column_labels):\n yield ''.join('<td>%s</td>' % c for c in row)\n for label, row_string in zip(row_labels, HeatMap._gen_table_rows(self)):\n yield ''.join('<td>%s</td>' % c for c in label) + row_string", "def print_table(rows, labels=None):\n if labels is None:\n labels = ROW_LABELS\n\n output_table = prettytable.PrettyTable()\n output_table.field_names = labels\n output_table.align = 'l'\n output_table.vrules = 
prettytable.prettytable.ALL\n output_table.hrules = prettytable.prettytable.HEADER\n\n for row in rows:\n row = [x.strip() for x in row]\n output_table.add_row(row)\n\n print output_table\n print ''", "def pprint_table(out, table):\n\n\tcol_paddings = []\n\n\tfor i in range(len(table[0])):\n\t\tcol_paddings.append(get_max_width(table, i))\n\n\tfor row in table:\n\t\t# left col\n\t\tout.write(str(row[0]).ljust(col_paddings[0] + 1))\n\t\t\n\t\t# rest of the cols\n\t\tfor i in range(1, len(row)):\n\t\t\tout.write(str(row[i]).rjust(col_paddings[i] + 2))\n\t\t\n\t\tout.write('\\n')", "def print_table(source, count=False):\n table_value = []\n table_header = []\n for source_key, source_value in source.items():\n for item in source_value:\n table_value.append([v for v in item.values()])\n table_header.append([k for k in item.keys()])\n if not count:\n print(tabulate(table_value,\n headers=table_header[0],\n tablefmt='orgtbl'))\n else:\n print(tabulate([[len(source_value)]],\n headers=[source_key],\n tablefmt='orgtbl'))", "def texttable(table, left=False):\n widths = (max(len(fld) for fld in line)\n for line in itertools.izip_longest(*table, fillvalue=\"\"))\n lc = '-' if left else ''\n formats = [\"%{0}{1}s\".format(lc, width) for width in widths]\n return ORS.join(\"%s\" % OFS.join(format % fld\n for (format, fld) in zip(formats, line))\n for line in table)", "def write(self, stream, root, order):\n stream.write('<table>\\n')\n stream.write('<tbody>\\n')\n if root and isinstance(root[0], dict):\n self.markup(stream, order, '<tr><th>', '</th><th>', '</th></tr>')\n for row in root:\n self.markup(stream, [row.get(col, '') for col in order], '<tr><td>', '</td><td>', '</td></tr>')\n else:\n for row in root:\n self.markup(stream, row, '<tr><td>', '</td></tr>', '</td><td>')\n stream.write('</tbody>\\n')\n stream.write('</table>\\n')", "def _create_table_html(self, table):\n if table != {} and table is not None:\n html_output = [['<hr>']]\n else:\n html_output = []\n\n for t in self._util_func.dict_key_list(table.keys()):\n html_output.append(table[t])\n\n return html_output", "def _print_table(stats):\n max_key_len = max([len(key) for key in stats])\n width_right = 15\n width_left = max(width_right, max_key_len)\n divider = '+-' + '-' * width_left + '-+-' + '-' * width_right + '-+'\n\n def get_format_char(value):\n if isinstance(value, int):\n return 'd'\n elif isinstance(value, float):\n return '.4f'\n else:\n return 's'\n\n print(divider)\n for name, value in stats.items():\n left_format = f':>{width_left}s'\n right_format = f':<{width_right}{get_format_char(value)}'\n line_format = f'| {{{left_format}}} | {{{right_format}}} |'\n line = line_format.format(name, value)\n print(line)\n print(divider)", "def print_table(table, title_list):\n\n # your goes code\n \n table.insert(0, title_list)\n # title listet 0.helyre teszi\n # your code\n\n lenght_list = [] # tartalmazza az összes szót\n for lines in table:\n for items in lines:\n lenght_list.append(items)\n\n longest_words_length = len(max(lenght_list, key=len))\n multiplier = len(title_list)*(longest_words_length+1)\n\n for sublist in table:\n print(\"|\\n|\", \"-\"*multiplier, \"|\")\n\n for j in sublist:\n print(\"|\", j, end = \" \"*(longest_words_length-len(j)))\n\n print(\"|\\n|\",\"-\"*multiplier, \"|\")", "def list_to_table(data, col_count):\r\n\r\n if len(data) % col_count != 0:\r\n message = \"Cannot convert list to table. 
\" \\\r\n \"The total number of cells ({0}) is not compatible with the number of columns ({1})\"\\\r\n .format(len(data), col_count)\r\n raise ValueError(message)\r\n\r\n row_count = len(data) // col_count\r\n # cpp way\r\n tabled_data = []\r\n for row_i in range(row_count):\r\n row = []\r\n for col_i in range(col_count):\r\n row.append(data[row_i * col_count + col_i])\r\n tabled_data.append(row)\r\n return tabled_data", "def Table(self, line):\n if line is None:\n # TODO(user): Use resource_printer.TablePrinter() when it lands.\n if self._rows:\n cols = len(self._rows[0])\n width = [0 for _ in range(cols)]\n for row in self._rows:\n for i in range(cols - 1):\n w = len(row[i])\n if width[i] <= w:\n width[i] = w + 1\n for row in self._rows:\n self._out.write(' ' * (self._indent[self._level] + 2))\n for i in range(cols - 1):\n self._out.write(row[i].ljust(width[i]))\n self._out.write(row[-1] + '\\n')\n self._rows = []\n self._table = False\n self._out.write('\\n')\n elif not self._table:\n self._table = True\n self.Line()\n else:\n self._rows.append(line.split(','))", "def display_table(a, m):\n # Initialize string\n result = ''\n result += '{'\n\n # Add all polynomials to the string, given they are already a string\n for i in a:\n for j in i[:-1]:\n result += display_poly(j, m)\n result += ', '\n\n # Add the last one here to prevent unneeded comma\n result += display_poly(i[-1], m)\n result += '; '\n\n # Remove final semicolon and close the brace\n result = result[:-2]\n result += '}'\n\n return result", "def __print_work_table(table):\n print \"%-5s %-30s %5s %5s %5s %5s %5s\" % ('Act', 'Pred', 'Block', 'Dummy', 'Succ', 'start', 'end')\n for k, col in sorted(table.items()):\n print \"%-5s %-30s %5s %5s %5s %5s %5s\" % tuple(\n [str(k)] + [list(col[0])] + [str(col[i]) for i in range(1, len(col))])", "def table(app, tuples, _asString=False, **options):\n\n display = app.display\n\n if not display.check(\"table\", options):\n return \"\"\n\n _browse = app._browse\n inNb = app.inNb\n\n api = app.api\n\n dContext = display.distill(options)\n end = dContext.end\n start = dContext.start\n withPassage = dContext.withPassage\n condensed = dContext.condensed\n condenseType = dContext.condenseType\n skipCols = dContext.skipCols\n\n ltr = _getLtr(app, dContext) or \"ltr\"\n\n item = condenseType if condensed else RESULT\n\n if condensed:\n tuples = condense(api, tuples, condenseType, multiple=True)\n skipCols = set()\n\n passageHead = f'</th><th class=\"tf {ltr}\">p' if withPassage is True else \"\"\n\n html = []\n one = True\n\n newOptions = display.consume(options, \"skipCols\")\n\n theseTuples = tuple(tupleEnum(tuples, start, end, LIMIT_TABLE, item, inNb))\n headerTypes = getHeaderTypes(app, theseTuples)\n\n for (i, tup) in theseTuples:\n if one:\n heads = '</th><th class=\"tf\">'.join(\n headerTypes.get(i, f\"column {i}\") for i in range(len(headerTypes))\n )\n html.append(\n f'<tr class=\"tf {ltr}\">'\n f'<th class=\"tf {ltr}\">n{passageHead}</th>'\n f'<th class=\"tf {ltr}\">{heads}</th>'\n f\"</tr>\"\n )\n one = False\n html.append(\n plainTuple(\n app,\n tup,\n seq=i,\n item=item,\n position=None,\n opened=False,\n _asString=True,\n skipCols=skipCols,\n **newOptions,\n )\n )\n html = \"<table>\" + \"\\n\".join(html) + \"</table>\"\n\n if _browse or _asString:\n return html\n dh(html, inNb=inNb)", "def convert_table(mkd):\n\t\n\tmd_table_codes = re.findall(r\".*\\|.*\\n.*\\-.*(?:\\n.*\\|.*)*\", mkd, re.M)\n\tfor md_code in md_table_codes:\n\t\t\n\t\tmd_rows = re.findall(r\"(.*\\|.*)\", md_code, 
re.M)\n\t\theader = md_rows.pop(0)\n\t\tcolumn_count = md_rows.pop(0).count(\"-\")\n\n\t\ttex_code = \"\\\\begin{tabular}{|\"+\"l|\"*column_count+\"}\\n\\hline\\n\"\n\t\ttex_code += header.strip(\" |\").replace(\"|\", \"&\")+\" \\\\\\\\\\n\\hline\\n\"\n\t\tfor row in md_rows:\n\t\t\ttex_code += row.strip(\" |\").replace(\"|\", \"&\")+\" \\\\\\\\\\n\"\n\t\ttex_code += \"\\hline\\n\\end{tabular}\"\n\n\t\tmkd = mkd.replace(md_code, tex_code)\n\n\treturn mkd", "def vertical_table(\n data, headers, sep_title=\"{n}. row\", sep_character=\"*\", sep_length=27\n):\n header_len = max([len(x) for x in headers])\n padded_headers = [x.ljust(header_len) for x in headers]\n formatted_rows = [_format_row(padded_headers, row) for row in data]\n\n output = []\n for i, result in enumerate(formatted_rows):\n yield _get_separator(i, sep_title, sep_character, sep_length) + result", "def print_table(hdrs, data):\n tw = TextWrapper()\n # only difficult thing here is wrapping the cell if it exceeds the row length, and it could be\n # extended in multiple cells in the same row so we need to determine the longest cell...\n def get_row_string(column_widths, row_data, fmt_separator=\"|\"):\n # receive a list of ints representing each column width and a list of text data representing\n # data for each column and return single string line.\n fmt = []\n cols = []\n for index, width in enumerate(column_widths):\n fmt.append(\"{%s:<%s}\" % (index, width))\n if index<len(row_data):\n #text = \" \".join(row_data[index].strip().split())\n text = row_data[index]\n tw.width = width\n # to honor original user's return characters, we need to wrap each individual line\n wraps = []\n for line in text.split(\"\\n\"):\n wrapped = tw.wrap(line.strip())\n if len(wrapped) == 0:\n wraps+= [\"\"]\n else:\n wraps+= wrapped\n cols.append(wraps)\n else:\n cols.append([\"\"])\n fmt = \"%s%s%s\" % (fmt_separator, (\" %s \" % fmt_separator).join(fmt), fmt_separator)\n # expand all columns to the max length column\n max_col = max([len(c) for c in cols])\n for c in cols:\n c+= [\"\"]*(max_col - len(c))\n #logger.debug(\"fmt: %s\", fmt)\n #logger.debug(\"columns:%s max length:%s\\n%s\", len(cols), max_col, cols)\n # build final result string which is one or more lines of merged cells\n results = []\n for index in range(0, max_col):\n # grab this index from all columns to create a single row\n row = [c[index] for c in cols]\n results.append(fmt.format(*row))\n return \"\\n\".join(results)\n\n final_rows = []\n column_widths = [h.get(\"length\", 5) for h in hdrs]\n separator = [\"-\"*h.get(\"length\", 5) for h in hdrs]\n separator_string = get_row_string(column_widths, separator, fmt_separator=\"+\")\n final_rows.append(separator_string)\n final_rows.append(get_row_string(column_widths, [h.get(\"name\", \"\") for h in hdrs]))\n final_rows.append(separator_string)\n for row in data:\n final_rows.append(get_row_string(column_widths, row))\n final_rows.append(separator_string)\n print(\"\\n\".join(final_rows))", "def _tabulate_data(\n self, headers, tabular_data, column_spacing=2, divider='-'\n ):\n max_lengths = [len(str(header)) for header in headers]\n for data_row in tabular_data:\n for column_index, item in enumerate(data_row):\n item = str(item).replace(self.color_package, '')\n item = str(item).replace(self.color_foreground, '')\n if len(str(item)) > max_lengths[column_index]:\n max_lengths[column_index] = len(str(item))\n\n dividers = [divider * length for length in max_lengths]\n\n def tabulate_row(items):\n row = ''\n item_template = 
'{item}{spacing}'\n for i, row_item in enumerate(items):\n\n # clear colors before calculating\n colorless_item = (\n str(row_item).replace(self.color_package, '')\n )\n colorless_item = colorless_item.replace(\n self.color_foreground, '')\n\n item_spacing = ' ' * (\n max_lengths[i] +\n column_spacing -\n len(str(colorless_item))\n )\n row += item_template.format(\n item=row_item, spacing=item_spacing)\n return row.strip() + '\\n'\n\n result = tabulate_row(items=headers)\n result += tabulate_row(items=dividers)\n for data_row in tabular_data:\n result += tabulate_row(items=data_row)\n\n return result.rstrip()", "def mediaWikiTable(leftmostTitle, array, formatFn=lambda x: str(x)):\n columnKeys = extractColumnKeys(array)\n print(\"{|\")\n for t in [leftmostTitle] + [str(k) for k in columnKeys]:\n print(\"!\" + \" !! \".join(titles))\n for k in sorted(array.keys, key=cmp_to_key(compareFn)):\n print(\"|-\")\n print(\"| \" + str(k))\n v = array[k]\n for ck in columnKeys:\n value = v.get(k, None)\n print(\"| \" + (formatFn(value) if value else \"\"))\n print(\"|}\")", "def _pretty_print_2d_array(rows):\n s = [[str(e) for e in row] for row in rows]\n lens = [max(map(len, col)) for col in zip(*s)]\n fmt = \"\\t\".join(\"{{:{}}}\".format(x) for x in lens)\n table = [fmt.format(*row) for row in s]\n return \"\\n\" + \"\\n\".join(table)", "def html_table(header_data, row_data):\n def make_header_cell(s):\n return '<th>{}</th>'.format(s)\n\n def make_cell(s):\n return '<td>{}</td>'.format(s)\n\n def make_row(s):\n return '<tr>{}</tr>'.format(s)\n headers = \" \".join([make_header_cell(h) for h in header_data])\n header_row = make_row(headers)\n rows = [make_row(\" \".join([make_cell(c) for c in row]))\n for row in row_data]\n rows = \"\\n\".join(rows)\n html = '<table>' + header_row + rows + '</table>'\n return html", "def format_no_tty(table):\r\n for i, row in enumerate(table.rows):\r\n for j, item in enumerate(row):\r\n table.rows[i][j] = format_output(item, fmt='raw')\r\n ptable = table.prettytable()\r\n for col in table.columns:\r\n ptable.align[col] = 'l'\r\n ptable.hrules = NONE\r\n ptable.border = False\r\n ptable.header = False\r\n ptable.left_padding_width = 0\r\n ptable.right_padding_width = 2\r\n return ptable", "def file_list_table_format(result):\n table = []\n for item in result:\n row = OrderedDict()\n row['Name'] = item['name']\n row['Type'] = item['fileType']\n row['Size'] = '' if item['fileType'] == 'directory' else str(item['contentLength'])\n row['Modified'] = item['lastModified'] or ' '\n table.append(row)\n return table", "def print_table(ledger):\n\n table = PrettyTable() # defines a PrettyTable object\n\n table.field_names = [\n \"hospital\",\n \"patient\",\n \"status\",\n \"nonce\",\n \"prev_hash\",\n \"a\",\n \"b\",\n \"c\",\n \"current_hash\",\n ] # define field names for table\n\n for block in ledger:\n table.add_row(\n [\n block[\"hospital\"],\n block[\"patient\"],\n block[\"status\"],\n block[\"nonce\"],\n block[\"prev_hash\"],\n block[\"a\"],\n block[\"b\"],\n block[\"c\"],\n block[\"current_hash\"],\n ]\n ) # add data to table\n\n print(\"\\n\\n\" + color.BOLD + \"Printing Your Ledger:\" + color.END)\n print(table) # print prettytable of patient info", "def show_table():\n\n title_list = ('ID', 'Platform', 'Producer', 'Year', 'Elements')\n \n return table, title_list", "def raw_data_to_table(raw_data, cursor):\n table = PrettyTable()\n # setting table field names\n table.field_names = [column[0] for column in cursor.description]\n for row in raw_data:\n 
table.add_row(row)\n print(table)", "def cluster_list_table_format(result):\n table = []\n for item in result:\n table.append(cluster_show_table_format(item))\n return table", "def build_table():\n with contextlib.ExitStack() as stack:\n files = [stack.enter_context(gzip.open(f, 'rt')) for f in sys.argv[1:]]\n iters = [(line.split() for line in f) for f in files]\n for it in iters:\n next(it)\n key = operator.itemgetter(0)\n table = []\n for k, g in itertools.groupby(merge(*iters, key=key), key=key):\n props = list(g)\n if len(props) == len(iters):\n table.append([k] + [x[1] for x in props])\n for snp in table:\n print(*snp)", "def print_table(table):\n for row in table:\n print(row)", "def print_table(table):\n for row in table:\n print(row)", "def table_html(table_rows: List[str]) -> str:\n return \"<table>{}</table>\".format(\"\".join(table_rows))", "def test_tabular_output_formatter():\n headers = [\"text\", \"numeric\"]\n data = [\n [\"abc\", Decimal(1)],\n [\"defg\", Decimal(\"11.1\")],\n [\"hi\", Decimal(\"1.1\")],\n [\"Pablo\\rß\\n\", 0],\n ]\n expected = dedent(\n \"\"\"\\\n +-------+---------+\n | text | numeric |\n +-------+---------+\n | abc | 1 |\n | defg | 11.1 |\n | hi | 1.1 |\n | Pablo | 0 |\n | ß | |\n +-------+---------+\"\"\"\n )\n\n print(expected)\n print(\n \"\\n\".join(\n TabularOutputFormatter().format_output(\n iter(data), headers, format_name=\"ascii\"\n )\n )\n )\n assert expected == \"\\n\".join(\n TabularOutputFormatter().format_output(iter(data), headers, format_name=\"ascii\")\n )", "def format_table(table, use_header=True, table_format=DEFAULT_TABLE_FORMAT, float_format=DEFAULT_FLOAT_FORMAT,\n\t\tcol_align=DEFAULT_COL_ALIGN):\n\n\tnum_cols = get_num_columns(table)\n\n\t# Parse parameters.\n\theaders = 'firstrow' if use_header else ()\n\tif isinstance(col_align, Iterable) and not isinstance(col_align, str):\n\t\t# Convert each `'none'` into `None`.\n\t\tcol_align = [\n\t\t\tNone if align.lower() == 'none' else align\n\t\t\tfor align in col_align\n\t\t]\n\telse:\n\t\t# Convert `'none'` into `None`.\n\t\tif col_align.lower() == 'none':\n\t\t\tcol_align = None\n\n\t\tcol_align = (col_align,) * num_cols\n\n\tformatted_table = tabulate(table, headers=headers, tablefmt=table_format, floatfmt=float_format, colalign=col_align)\n\t\n\treturn formatted_table", "def print_table(table, title_list):\n\n # your goes code\n cols = len(title_list)\n\n \n\n table.insert(0,title_list)\n\n for sublist in range(len(table)):\n if cols != len(table[sublist]):\n print('dataset does not match number of cols')\n quit()\n\n max_lenghts = []\n maxi = -1\n for sub_elem in range(cols): \n maxi = -1 \n for sublist in range(len(table)):\n if len(table[sublist][sub_elem]) > maxi:\n maxi = len(table[sublist][sub_elem])\n max_lenghts.append(maxi)\n \n\n \n\n sub_elem = 0\n \n for sublist in range(len(table)):\n if sublist == 0:\n while sub_elem < len(table[0]):\n \n if sub_elem == len(table[0])- 1:\n print('\\033[1;37;41m| {:^25} |'.format(table[sublist][sub_elem]), end =\"\")\n else:\n print('\\033[1;37;41m| {:^25} '.format(table[sublist][sub_elem]), end =\"\")\n sub_elem += 1\n \n print('\\033[0;32;48m\\n') \n sub_elem = 0 \n else:\n while sub_elem < len(table[0]):\n \n if sub_elem == len(table[0])- 1:\n print('\\033[0;37;44m| {:^25} |'.format(table[sublist][sub_elem]), end =\"\")\n else:\n print('\\033[0;37;44m| {:^25} '.format(table[sublist][sub_elem]), end =\"\")\n sub_elem += 1\n \n print('\\033[0;32;48m\\n') \n sub_elem = 0 \n print('\\033[0;37;48m\\n')\n table.pop(0)", "def 
print_table(table, fieldnames):\n print(\"{:<19}\".format(fieldnames[0]), end='')\n for field in fieldnames[1:]:\n print(\"{:>6}\".format(field), end='')\n print(\"\")\n for name, row in table.items():\n # Header column left justified\n print(\"{:<19}\".format(name), end='')\n # Remaining columns right justified\n for field in fieldnames[1:]:\n print(\"{:>6}\".format(row[field]), end='')\n print(\"\", end='\\n')", "def print_table(table, fieldnames):\n print(\"{:<19}\".format(fieldnames[0]), end='')\n for field in fieldnames[1:]:\n print(\"{:>6}\".format(field), end='')\n print(\"\")\n for name, row in table.items():\n # Header column left justified\n print(\"{:<19}\".format(name), end='')\n # Remaining columns right justified\n for field in fieldnames[1:]:\n print(\"{:>6}\".format(row[field]), end='')\n print(\"\", end='\\n')", "def print_table(header, rows, *, sortby=\"\", alignl=\"\", alignr=\"\", hrules=\"\"):\n output = prettytable.PrettyTable(header)\n output.format = True\n if hrules:\n output.hrules = getattr(prettytable, hrules)\n\n for row in rows:\n if len(header) != len(row):\n raise ValueError(\"row does not have same size of header\")\n row_entry = []\n for pos in row:\n row_entry.append(pos)\n output.add_row(row_entry)\n\n if sortby:\n # if sortby is invalid, ie, does not exist on header,\n # sort by first column by default\n output.sortby = sortby if sortby in header else header[0]\n for left in alignl:\n output.align[left] = \"l\"\n for right in alignr:\n output.align[right] = \"r\"\n\n print(output)", "def columnar(list_table_rows: list[dict[str, str]]) -> dict[str, list[str]]:\n column_oriented_table: dict[str, list[str]] = {}\n first_row: dict[str, str] = list_table_rows[0]\n for column in first_row:\n column_oriented_table[column] = column_values(list_table_rows, column)\n return column_oriented_table", "def format_participant_table(participants_f, url_prefix):\n personal_ids = sorted(parse_recipients(participants_f).keys())\n url_prefix = url_prefix if url_prefix.endswith('/') else url_prefix + '/'\n\n result = '<table class=\"data-table\">\\n<tr><th>Personal ID</th></tr>\\n'\n for personal_id in personal_ids:\n url = url_prefix + personal_id + '/index.html'\n result += '<tr><td><a href=\"%s\">%s</a></td></tr>\\n' % (url,\n personal_id)\n result += '</table>\\n'\n\n return result", "def print_table(table_2D, title_list):\n \n max_length = [] # max length of item for each column\n\n # BELOW VAR NEEDS TO BE FIXED, GOT RID OFF\n # without this correction table horizontal lines displays unevenly\n length_correction = 2 \n\n # count max length of all elements in a table, so we can print all details in neat columns\n for row in table_2D:\n column = 0\n\n for item in row:\n item = str(item)\n\n try:\n if len(item) > max_length[column]:\n max_length[column] = len(item)\n column += 1\n # expand table if needed\n except IndexError:\n max_length.append(0)\n if len(item) > max_length[column]:\n max_length[column] = len(item)\n column += 1\n\n title_index = \"No\"\n\n # print titles, while keeping columns straight\n titles = side_sign + \" \" + title_index + separator_sign\n for i in range(len(title_list)):\n # count length of all titles, to check if they are longer than entries\n if len(title_list[i]) > max_length[i]:\n max_length[i] = len(title_list[i])\n\n titles += title_list[i] + fill(str(title_list[i]), max_length[i]) + separator_sign\n\n print(\"\\n\\t/\" + fill(\"\", len(titles.strip())-length_correction, sourrounding_sign) + \"\\\\\") # print top line\n print(\"\\t\" + 
titles)\n print(\"\\t\" + side_sign + fill(\"\", len(titles.strip())-length_correction, sourrounding_sign) + side_sign) # print line below titles\n\n table_content = \"\"\n # print all game details, while keeping columns straight\n for row in range(len(table_2D)):\n table_content += \"\\t\" + side_sign + \" \" + str(row+1) + fill(str(row+1), max(len(str(row+1)), len(title_index))) + separator_sign\n for item in range(len(table_2D[row])):\n table_content += str(table_2D[row][item]) + fill(str(table_2D[row][item]), max_length[item]) + separator_sign\n table_content += \"\\n\"\n\n print(table_content, end=\"\")\n print(\"\\t\\\\\" + fill(\"\", len(titles.strip())-length_correction, sourrounding_sign) + \"/\")", "def print_table(table):\n for row in table:\n print(row)", "def table_row(self, content):\n return ['<tr>\\n%s</tr>\\n'] + content", "def ydump_table(doc, headings, rows, **kwargs):\n doc, tag, text, line = doc.ttl()\n with tag('table', **kwargs):\n with tag('tr'):\n for x in headings:\n line('th', str(x))\n for row in rows:\n with tag('tr'):\n for x in row:\n line('td', str(x))", "def pretty_display(self):\n\t\tpretty_space = PrettyTable()\n\t\tpretty_space.field_names = range(self.space.shape[1])\n\t\tcount = 0\n\t\tpretty_row = []\n\t\tfor cell in self.space.flat:\n\t\t\tcount = count + 1\n\t\t\tpretty_row.append(cell.state)\n\t\t\tif count >= self.space.shape[1]:\n\t\t\t\tpretty_space.add_row(pretty_row)\n\t\t\t\tcount = 0\n\t\t\t\tpretty_row = []\n\t\tprint(pretty_space)", "def table_maker():\r\n try:\r\n off_copy = off.copy()\r\n man_copy = man.copy()\r\n exe_copy = exe.copy()\r\n ceo_copy = ceo.copy()\r\n list_of_lists = [off_copy, man_copy, exe_copy, ceo_copy]\r\n\r\n for i in list_of_lists:\r\n for j in i:\r\n if type(j) == str:\r\n continue\r\n else:\r\n raise ValueError('All elements must be strings')\r\n\r\n row_num = max(len(off_copy), len(man_copy),\r\n len(exe_copy), len(ceo_copy))\r\n for i in list_of_lists:\r\n if len(i) != row_num:\r\n diff = row_num - len(i)\r\n for j in range(diff):\r\n i.append('')\r\n\r\n t = PrettyTable(\r\n ['Office Workers', 'Managers', 'Executives', 'CEO'])\r\n for i in range(row_num):\r\n t.add_row([off_copy[i], man_copy[i], exe_copy[i], ceo_copy[i]])\r\n\r\n with open('Employee Table.txt', 'w') as f:\r\n f.write(str(t))\r\n\r\n except FileNotFoundError:\r\n print(\"Error: No file entered\")" ]
[ "0.7323263", "0.70893776", "0.7016123", "0.6676324", "0.6575941", "0.64442533", "0.6433064", "0.64183867", "0.6377864", "0.6365274", "0.6337073", "0.6316442", "0.6308462", "0.6291281", "0.6273893", "0.62692267", "0.6268458", "0.6246334", "0.6235649", "0.62235904", "0.62235904", "0.6220759", "0.6219042", "0.6208327", "0.6190434", "0.61716306", "0.61683315", "0.61683095", "0.61476195", "0.61434394", "0.6139954", "0.61361617", "0.6113751", "0.6111137", "0.61103606", "0.6069568", "0.60608983", "0.6050501", "0.60489714", "0.6047363", "0.60439605", "0.603639", "0.6035302", "0.6018542", "0.6018187", "0.6013392", "0.5983218", "0.5960558", "0.59586716", "0.5939885", "0.5936998", "0.59322727", "0.5930297", "0.591615", "0.5907004", "0.5903251", "0.5889378", "0.58870184", "0.58826476", "0.5874404", "0.58721685", "0.5869965", "0.5862042", "0.5856984", "0.58458775", "0.5844088", "0.58413404", "0.58280075", "0.58193547", "0.58151615", "0.58125424", "0.5806402", "0.58046556", "0.5800749", "0.57999474", "0.57787585", "0.5769634", "0.5752817", "0.57345366", "0.5730892", "0.5729492", "0.57254463", "0.57189524", "0.5717271", "0.5717271", "0.57171977", "0.5714316", "0.57130647", "0.5711167", "0.5709617", "0.5709617", "0.5690366", "0.5689668", "0.56857777", "0.56830764", "0.5679681", "0.5677216", "0.56748724", "0.56746274", "0.5661142" ]
0.64111507
8
Handy function that splits a command line into positional and keyword arguments, translates names of Gadget instances into the actual objects, evaluates expressions that can be evaluated, and accepts the rest as strings. For example:
from fnmatch import filter  # assumed import: the wildcard matching below uses fnmatch's filter(names, pattern)

def str_to_args(line):
    args_in = line.split()
    args_out = []
    kwargs_out = {}
    # Map the name of every live Gadget instance to the instance itself.
    gadget_lookup = {g.name: g for g in Gadget.getinstances()}
    for a in args_in:
        if '=' in a:
            # Keyword argument of the form key=val.
            key, val = a.split('=')
            if ('*' in val) or ('?' in val):
                # Wildcard pattern: collect every matching gadget in a list.
                matching_names = filter(gadget_lookup.keys(), val)
                kwargs_out[key] = [gadget_lookup[name] for name in matching_names]
            elif val in gadget_lookup.keys():
                # Exact gadget name: translate it to the actual object.
                kwargs_out[key] = gadget_lookup[val]
            else:
                # Evaluate what can be evaluated, accept the rest as a string.
                try:
                    kwargs_out[key] = eval(val)
                except NameError:
                    kwargs_out[key] = val
        else:
            # Positional argument.
            if ('*' in a) or ('?' in a):
                matching_names = filter(gadget_lookup.keys(), a)
                args_out += [gadget_lookup[name] for name in matching_names]
            elif a in gadget_lookup.keys():
                args_out.append(gadget_lookup[a])
            else:
                try:
                    args_out.append(eval(a))
                except NameError:
                    # Neither a gadget name nor an evaluable expression.
                    args_out.append(a)
    return args_out, kwargs_out
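A minimal usage sketch, assuming a Gadget base class whose live instances are reachable through Gadget.getinstances() and carry a name attribute; the gadget names det1, det2 and motor1 below are hypothetical:

# Hypothetical registry: Gadget.getinstances() yields gadgets named
# 'det1', 'det2' and 'motor1'.
line = "det1 2.5 hello exposure=0.1 detectors=det?"
args, kwargs = str_to_args(line)
# args   -> [<det1 gadget>, 2.5, 'hello']
#            (gadget name resolved, '2.5' evaluated, 'hello' kept as a string)
# kwargs -> {'exposure': 0.1, 'detectors': [<det1 gadget>, <det2 gadget>]}
#            ('det?' expanded through the wildcard branch; motor1 does not match)

The returned pair plugs straight into a call such as some_command(*args, **kwargs).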
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def eval(*args, **kwargs):\n\n pass", "def eval(x, env):\n if isinstance(x, Symbol): # variable reference\n return env.find(x)[x]\n elif not isinstance(x, List): # constant literal\n return x\n keyword = x.pop()\n if keyword in {\"quote\", \"'\"}: # (quote exp)\n return (\n x[0]\n if len(x) == 2\n else \" \".join(str(exp) for exp in x)\n .replace(\"[\", \"(\")\n .replace(\"]\", \")\")\n .replace(\"'\", \"\")\n .replace(\",\", \"\")\n )\n elif keyword == \"cond\": # (if test conseq alt)\n for cond_branch in x:\n branch, cond = cond_branch[:-1], cond_branch[-1]\n if cond == \"else\" or eval(cond, env):\n return eval(branch, env)\n elif keyword == \"define\": # (define var exp)\n var, exp = x\n env[var] = eval(exp, env)\n elif keyword == \"lambda\": # (lambda (var...) body)\n parms, body = x\n return Procedure(parms, body, env)\n else: # (proc arg...)\n proc = eval(keyword, env)\n args = [eval(exp, env) for exp in x] if x[-1] != \"'\" else x[:-1]\n return proc(*args)", "def test_split(self):\n\n p1 = \"std::vector<char, std::allocator<char> >\"\n p2 = \"std::vector<int, std::allocator<int> >\"\n args_list = [\n \"const std::basic_string<char> &\", \"const int &\", \"const double &\"]\n\n for arg in args_list:\n\n li = [p1]\n name, args = declarations.templates.split(\n \"myClass0a<\" + \", \".join(li) + \">\")\n self.assertEqual(name, \"myClass0a\")\n self.assertEqual(args, li)\n\n li = [p1, p2]\n name, args = declarations.templates.split(\n \"myClass0b<\" + \", \".join(li) + \">\")\n self.assertEqual(name, \"myClass0b\")\n self.assertEqual(args, li)\n\n li = [p1, p2, p2]\n name, args = declarations.templates.split(\n \"myClass0c<\" + \", \".join(li) + \">\")\n self.assertEqual(name, \"myClass0c\")\n self.assertEqual(args, li)\n\n li = [p1 + \" (\" + arg + \")\"]\n name, args = declarations.templates.split(\n \"myClass1<\" + \", \".join(li) + \">\")\n self.assertEqual(name, \"myClass1\")\n self.assertEqual(args, li)\n\n li = [p1 + \" (\" + arg + \", \" + arg + \")\"]\n name, args = declarations.templates.split(\n \"myClass2<\" + \", \".join(li) + \">\")\n self.assertEqual(name, \"myClass2\")\n self.assertEqual(args, li)\n\n li = [p2 + \" (\" + arg + \", \" + arg + \")\"]\n name, args = declarations.templates.split(\n \"myClass3<\" + \", \".join(li) + \">\")\n self.assertEqual(name, \"myClass3\")\n self.assertEqual(args, li)\n\n li = [p1 + \" (\" + arg + \", \" + arg + \", \" + arg + \")\"]\n name, args = declarations.templates.split(\n \"myClass4<\" + \", \".join(li) + \">\")\n self.assertEqual(name, \"myClass4\")\n self.assertEqual(args, li)\n\n li = [\n p1 + \" (\" + arg + \", \" + arg + \", \" + arg + \")\",\n p1]\n name, args = declarations.templates.split(\n \"myClass5<\" + \", \".join(li) + \">\")\n self.assertEqual(name, \"myClass5\")\n self.assertEqual(args, li)\n\n li = [\n p1,\n p1 + \" (\" + arg + \", \" + arg + \", \" + arg + \")\"]\n name, args = declarations.templates.split(\n \"myClass6<\" + \", \".join(li) + \">\")\n self.assertEqual(name, \"myClass6\")\n self.assertEqual(args, li)\n\n li = [\n p2 + \" (\" + arg + \")\",\n p1,\n p1 + \" (\" + arg + \", \" + arg + \", \" + arg + \")\"]\n name, args = declarations.templates.split(\n \"myClass7<\" + \", \".join(li) + \">\")\n self.assertEqual(name, \"myClass7\")\n self.assertEqual(args, li)\n\n li = [\n p1,\n p2 + \" (\" + arg + \")\",\n p1 + \" (\" + arg + \", \" + arg + \", \" + arg + \")\"]\n name, args = declarations.templates.split(\n \"myClass8<\" + \", \".join(li) + \">\")\n self.assertEqual(name, \"myClass8\")\n 
self.assertEqual(args, li)\n\n li = [\n p2 + \" (\" + arg + \")\",\n p1 + \" (\" + arg + \", \" + arg + \")\",\n p1]\n name, args = declarations.templates.split(\n \"myClass9<\" + \", \".join(li) + \">\")\n self.assertEqual(name, \"myClass9\")\n self.assertEqual(args, li)\n\n li = [\n p2 + \" (\" + arg + \")\",\n p1 + \" (\" + arg + \", \" + arg + \", \" + arg + \")\",\n p1,\n p2]\n name, args = declarations.templates.split(\n \"myClass10<\" + \", \".join(li) + \">\")\n self.assertEqual(name, \"myClass10\")\n self.assertEqual(args, li)", "def eval(x, env=global_env):\n if isinstance(x, Symbol):\n if x[0] == '\"':\n # x is a string literal\n # Cut off the quotes and return it as such\n return x[1:-1]\n\n # OK, it's a variable.\n return get_var(x, env)\n\n # Maybe x isn't a list but some kind of literal\n elif not isinstance(x, List):\n # const. literal\n return x\n\n # OK, x is a list, but is it empty?\n elif len(x) == 0:\n return []\n\n # It isn't empty... maybe it's a special form.\n # Dot extraction special form\n elif x[0] == '.':\n return dot_extraction(x, env)\n\n # Conditional special form\n elif x[0] == 'if':\n try:\n # With an alt clause\n (_, test, conseq, alt) = x\n except ValueError:\n try:\n # Without an alt clause, defaults to False\n (_, test, conseq) = x\n alt = False\n except ValueError:\n raise SyntaxError(\n \"if requires two or three arguments\" +\n \"(test, consqeuence, and optional alternative)\")\n exp = (conseq if eval(test, env) else alt)\n return eval(exp, env)\n\n # Variable definition special form\n elif x[0] == 'define':\n try:\n (_, var, exp) = x\n except ValueError:\n raise SyntaxError(\n \"define requires exactly two arguments \" +\n \"(the name of the variable and its value)\")\n val = eval(exp, env)\n env[var] = val\n # This is not standard Lisp, but I like it\n return val\n\n # Import special form\n elif x[0] == 'import':\n try:\n (_, exp) = x\n except ValueError as e:\n raise SyntaxError(\n \"import requires exactly 1 argument \" +\n \"(the name of the module). {}\".format(e))\n return importlib.import_module(exp)\n\n else:\n # This is the default case:\n # (f arg1 arg2 .. argn)\n # or perhaps\n # (item1 item2 ... itemn)\n\n # Evaluate the first item, to see if it gives us back a callable\n proc = eval(x[0], env)\n\n # Handle the case of (item1 item2 ... itemn)\n if not callable(proc):\n # If input is of the form (item), put item in a list and we're done\n if len(x) == 1:\n return [proc]\n else:\n # If there are more elements, eval them and put them in a list\n L = [proc]\n for item in x[1:]:\n L.append(eval(item))\n return L\n\n # OK, input is of the form (f arg1 arg2 ... 
argn)\n args = [eval(arg, env) for arg in x[1:]]\n try:\n if wants_env(proc):\n return proc(*args, env=env)\n else:\n return proc(*args)\n except TypeError as e:\n if callable(proc):\n # Callable, but wrong number of args or something\n raise NameError(e)\n raise NameError(\"Tried to call a non-callable Python object {} \" +\n \"(its type is {})\".format(x[0], type(proc)))", "def eval_arg(arg_value, arg_name=''):\n if arg_name.lower().endswith('_list') and isinstance(arg_value, str):\n return [eval_arg(cell) for cell in arg_value.split(',')]\n if not isinstance(arg_value, str):\n return arg_value\n if arg_value.lower() in ['true', 'false']:\n return eval(arg_value.capitalize())\n if arg_value.lstrip('-').isdigit():\n return int(arg_value)\n if arg_value.replace('.', '', 1).isdigit():\n return float(arg_value)\n return arg_value", "def expression(*args, alwaysEvaluate: Union[int, bool]=0, animated: Union[int, bool]=0,\n attribute: Union[AnyStr, bool]=\"\", name: Union[AnyStr, bool]=\"\", object:\n Union[AnyStr, bool]=\"\", safe: bool=True, shortNames: bool=True, string:\n Union[AnyStr, bool]=\"\", timeDependent: bool=True, unitConversion: Union[AnyStr,\n bool]=\"all.\", q=True, query=True, e=True, edit=True, **kwargs)->Union[AnyStr,\n Any]:\n pass", "def _eval_params(trial, params: Dict[str, Any]) -> Dict[str, Any]:\n prepared = dict()\n for arg, value in params.items():\n if isinstance(value, dict):\n # Extract method.\n name = list(value.keys())[0]\n # Add prefix.\n method = \"suggest_\" + name\n # Get method kwargs.\n kwargs = value[name]\n # Add name arg.\n kwargs.update({\"name\": arg})\n # Evaluate method.\n value = getattr(trial, method)(**kwargs)\n prepared.update({arg: value})\n return prepared", "def _builtin_split_call(term, parts, database=None, location=None, **kwdargs):\n functor = '=..'\n # modes:\n # <v> =.. list => list has to be fixed length and non-empty\n # IF its length > 1 then first element should be an atom\n # <n> =.. 
<list or var>\n #\n mode = check_mode((term, parts), ['vL', 'nv', 'nl'], functor=functor, **kwdargs)\n if mode == 0:\n elements, tail = list_elements(parts)\n if len(elements) == 0:\n raise CallModeError(functor, (term, parts),\n message='non-empty list for arg #2 if arg #1 is a variable',\n location=database.lineno(location))\n elif len(elements) > 1 and not _is_atom(elements[0]):\n raise CallModeError(functor, (term, parts),\n message='atom as first element in list if arg #1 is a variable',\n location=database.lineno(location))\n elif len(elements) == 1:\n # Special case => term == parts[0]\n return [(elements[0], parts)]\n else:\n term_part = elements[0](*elements[1:])\n return [(term_part, parts)]\n else:\n part_list = (term.with_args(),) + term.args\n current = Term('[]')\n for t in reversed(part_list):\n current = Term('.', t, current)\n try:\n local_values = {}\n list_part = unify_value(current, parts, local_values)\n elements, tail = list_elements(list_part)\n term_new = elements[0](*elements[1:])\n term_part = unify_value(term, term_new, local_values)\n return [(term_part, list_part)]\n except UnifyError:\n return []", "def act_on_expression(self, *, arg, values: List, op):\n assert isinstance(arg, (pl.DataFrame, type(None)))\n assert isinstance(values, List)\n assert isinstance(op, data_algebra.expr_rep.Expression)\n # process inputs\n for v in values:\n assert isinstance(v, (List, PolarsTerm))\n want_literals_unpacked = (op.op in self.polars_model.want_literals_unpacked)\n if want_literals_unpacked:\n args = _unpack_lits(values)\n else:\n args = [v.polars_term for v in values]\n # lookup method\n f = None\n arity = len(values)\n if (f is None) and (arity == 0):\n if op.op in [\"_uniform\", \"uniform\"]:\n assert isinstance(arg, pl.DataFrame)\n return PolarsTerm(\n polars_term=pl.Series(\n values=self.polars_model.rng.uniform(0.0, 1.0, arg.shape[0]),\n dtype=pl.datatypes.Float64,\n dtype_if_empty=pl.datatypes.Float64),\n is_series=True,\n )\n elif op.op in [\"_sgroup\", \"sgroup\"]:\n assert isinstance(arg, pl.DataFrame)\n n_groups = 0\n if arg.shape[0] > 0:\n n_groups = 1\n if len(self.partition_by) > 0:\n s_groups = arg.groupby(self.partition_by).apply(lambda x: x.head(1)).shape[0]\n return PolarsTerm(\n polars_term=pl.lit(s_groups),\n lit_value=n_groups,\n is_literal=True,\n )\n elif op.op in [\"_ngroup\", \"ngroup\"]:\n assert isinstance(arg, pl.DataFrame)\n group_labels = []\n if arg.shape[0] > 0:\n n_groups = [0] * arg.shape[0]\n if len(self.partition_by) > 0:\n # TODO: number the groups, not size them\n n_groups = arg.groupby(self.partition_by).apply(lambda x: x.head(1)).shape[0]\n return PolarsTerm(\n polars_term=pl.Series(\n values=group_labels,\n dtype=pl.datatypes.Int64,\n dtype_if_empty=pl.datatypes.Int64),\n is_series=True,\n )\n if f is None:\n try:\n if self.extend_context:\n f = self.polars_model.extend_expr_impl_map[len(values)][op.op]\n elif self.project_context:\n f = self.polars_model.project_expr_impl_map[len(values)][op.op]\n except KeyError:\n pass\n if (f is None) and (arity > 0):\n try:\n f = self.polars_model.impl_map_arbitrary_arity[op.op]\n except KeyError:\n pass\n if f is None:\n raise ValueError(f\"failed to lookup {op}\")\n # apply method\n res = f(*args)\n # wrap result\n return PolarsTerm(\n polars_term=res,\n )", "def fetch_arguments(op_def, arg, ws):\n return [fetch_argument(op_def, desc, ws) for desc in arg.strings]", "def _eval_composite(self, vars, expr, dest=None):\n parts = expr.strip().split(\" \")\n if len(parts) == 1:\n try:\n # Try 
parsing as an atom; may fail which is okay, there's more cases we\n # can try below.\n return self._eval_atom(vars, parts[0])\n except NotAnAtom:\n pass\n (instr, arg) = parts[0].split(\":\")\n if instr == \"array\":\n count = int(arg)\n result = []\n if not dest is None:\n vars[dest] = result\n for part in parts[1:]:\n result.append(self._eval_atom(vars, part))\n assert count == len(result)\n return result\n elif instr == \"map\":\n count = int(arg)\n result = collections.OrderedDict()\n if not dest is None:\n vars[dest] = result\n for i in range(0, count):\n key = self._eval_atom(vars, parts[1 + 2 * i])\n value = self._eval_atom(vars, parts[2 + 2 * i])\n result[key] = value\n return result\n elif instr == \"seed\":\n count = int(arg)\n header = self._eval_atom(vars, parts[1])\n fields = collections.OrderedDict()\n result = plankton.codec.Seed(header, fields)\n if not dest is None:\n vars[dest] = result\n for i in range(0, count):\n field = self._eval_atom(vars, parts[2 + 2 * i])\n value = self._eval_atom(vars, parts[3 + 2 * i])\n fields[field] = value\n return result\n elif instr == \"struct\":\n count = int(arg)\n fields = []\n result = plankton.codec.Struct(fields)\n if not dest is None:\n vars[dest] = result\n for i in range(0, count):\n tag = int(parts[1 + 2 * i])\n value = self._eval_atom(vars, parts[2 + 2 * i])\n fields.append((tag, value))\n return result\n raise Exception(\"Unexpected expression {}\".format(expr))", "def substitute(self, args, lvars):\n if is_String(args) and not isinstance(args, CmdStringHolder):\n args = str(args) # In case it's a UserString.\n try:\n def sub_match(match):\n return self.conv(self.expand(match.group(1), lvars))\n result = _dollar_exps.sub(sub_match, args)\n except TypeError:\n # If the internal conversion routine doesn't return\n # strings (it could be overridden to return Nodes, for\n # example), then the 1.5.2 re module will throw this\n # exception. 
Back off to a slower, general-purpose\n # algorithm that works for all data types.\n args = _separate_args.findall(args)\n result = []\n for a in args:\n result.append(self.conv(self.expand(a, lvars)))\n if len(result) == 1:\n result = result[0]\n else:\n result = ''.join(map(str, result))\n return result\n else:\n return self.expand(args, lvars)", "def _call(self, args):\n a = args.split(' ', 1)\n if a:\n getattr(self, a[0])(*a[1:])", "def split_args(args):\n words = []\n quoted_words = []\n\n quoted = re.compile('\"([^\"]+)\"')\n for value in quoted.findall(args):\n quoted_words.append(value)\n \n new_str = args\n\n for i in quoted_words:\n new_str = re.sub('\"[^\"]+\"', '', new_str)\n\n for i in new_str.split():\n words.append(i)\n \n words.extend(quoted_words)\n \n return words", "def call_func_dynamically(function_name, argument_names, arg_value_pair, module_name = \"__main__\"):\n # mapping between arg name and arg value\n arg = list(map(lambda arg_name, arg_value: str_join([arg_name, arg_value], \"=\"), argument_names, arg_value_pair))\n \n # make function call expresion\n func_call = function_name + \"(\" + str_join(arg, \",\") + \")\"\n \n # result\n result = eval(func_call, {function_name : getattr(sys.modules[module_name], function_name)})\n \n return result", "def parameterize(names, value_groups):\n\n def decorator(func):\n @functools.wraps(func)\n def wrapped(self):\n for values in value_groups:\n resolved = map(Invoked.eval, always_iterable(values))\n params = dict(zip(always_iterable(names), resolved))\n with self.subTest(**params):\n func(self, **params)\n\n return wrapped\n\n return decorator", "def parse_arguments(args):", "def substitute(self, args, lvars, within_list):\n\n if is_String(args) and not isinstance(args, CmdStringHolder):\n args = str(args) # In case it's a UserString.\n args = _separate_args.findall(args)\n for a in args:\n if a[0] in ' \\t\\n\\r\\f\\v':\n if '\\n' in a:\n self.next_line()\n elif within_list:\n self.append(a)\n else:\n self.next_word()\n else:\n self.expand(a, lvars, within_list)\n else:\n self.expand(args, lvars, within_list)", "def parse_list_args(args):\n\n args.image_transformers = parse_transformers(args.image_transformers)\n args.tensor_transformers = parse_transformers(args.tensor_transformers)\n args.test_image_transformers = parse_transformers(args.test_image_transformers)\n args.test_tensor_transformers = parse_transformers(args.test_tensor_transformers)\n\n args.block_layout = parse_block_layout(args.block_layout)", "def deformerEvaluator(*args, chains: bool=True, meshes: bool=True, q=True, query=True,\n **kwargs)->Union[List[AnyStr], Any]:\n pass", "def eval(*args, **kwargs)->Any:\n pass", "def reparam(string_, dictionary):\n dictionary = dictionary.copy() # eval mucks with it\n # disable builtins to avoid risk for remote code exection.\n dictionary['__builtins__'] = object()\n vals = []\n result = []\n for live, chunk in _interpolate(string_):\n if live:\n v = eval(chunk, dictionary)\n result.append(sqlquote(v))\n else: \n result.append(chunk)\n return SQLQuery.join(result, '')", "def parse(string):\n \n global local_vars\n print \"parse(\"+string+\")\"\n\n # variables\n if string in local_vars: # e.g. 
'y'\n return string\n elif string == 'it':\n # print 'it: ',references[0]\n return g.it\n\n # operators\n elif string.find('\\gamma') == 0:\n return gamma(string[7],string[9:-1])\n elif string.find('\\iota') == 0:\n # treating iota as gamma for now\n return iota(string[6],string[8:-1])\n\n # function application\n else:\n fun = string.split( '(' , 1)[0]\n arg = parse(string.split( '(' , 1)[1][:-1])\n exec(fun+'(arg)')", "def _parse_arguments(text):\n parser = argparse.ArgumentParser(\n description=\"Build Python-based Rez packages in just a single command.\",\n )\n\n parser.add_argument(\n \"--hdas\",\n nargs=\"+\",\n help=\"The relative paths to each folder containing VCS-style Houdini HDAs.\",\n )\n\n parser.add_argument(\n \"-i\",\n \"--items\",\n nargs=\"+\",\n help=\"The relative paths to each file/folder to copy / install.\",\n )\n\n parser.add_argument(\n \"-e\",\n \"--eggs\",\n nargs=\"+\",\n help=\"The relative paths to each file/folder to make into a .egg file.\",\n )\n\n parser.add_argument(\n \"--symlink\",\n action=\"store_true\",\n default=linker.must_symlink(),\n help=\"If True, symlink everything back to the source Rez package.\",\n )\n\n parser.add_argument(\n \"--symlink-files\",\n action=\"store_true\",\n default=linker.must_symlink_files(),\n help=\"If True, symlink files back to the source Rez package.\",\n )\n\n parser.add_argument(\n \"--symlink-folders\",\n action=\"store_true\",\n default=linker.must_symlink_folders(),\n help=\"If True, symlink folders back to the source Rez package.\",\n )\n\n known, _ = parser.parse_known_args(text)\n\n return known", "def parametrize(argnames=None, # type: Union[str, Tuple[str], List[str]]\n argvalues=None, # type: Iterable[Any]\n indirect=False, # type: bool\n ids=None, # type: Union[Callable, Iterable[str]]\n idstyle=None, # type: Union[str, Callable]\n idgen=_IDGEN, # type: Union[str, Callable]\n auto_refs=True, # type: bool\n scope=None, # type: str\n hook=None, # type: Callable[[Callable], Callable]\n debug=False, # type: bool\n **args):\n # type: (...) 
-> Callable[[T], T]\n _decorate, needs_inject = _parametrize_plus(argnames, argvalues, indirect=indirect, ids=ids, idgen=idgen,\n auto_refs=auto_refs, idstyle=idstyle, scope=scope,\n hook=hook, debug=debug, **args)\n if needs_inject:\n @inject_host\n def _apply_parametrize_plus(f, host_class_or_module):\n return _decorate(f, host_class_or_module)\n return _apply_parametrize_plus\n else:\n return _decorate", "def build_grammer(ast):\n # example:\n # ['ldw', ['tgt', 'reg'], ',', ['offset', 's7'], '(', ['base', 'reg'], ')']\n def lookup(name, type, modifier=None):\n g = globals()[type].setResultsName(name).setName(name)\n if modifier == plus:\n g = pp.Group(pp.delimitedList(g, delim=\",\"))\n def setname(s, l, t):\n # if token is a str object, don't set name/type\n if isinstance(t[0], str):\n return\n t[0].name = name\n t[0].type = type\n g.addParseAction(setname)\n return g\n def punct(c):\n return pp.Suppress(c).setResultsName(c).setName(c)\n if isinstance(ast[0], str):\n g = pp.Literal(ast[0]).setResultsName(ast[0]).setName(ast[0])\n for arg in ast[1:]:\n g += punct(arg) if isinstance(arg, str) else lookup(*arg)\n return g", "def parse_transformers(raw_transformers):\n transformers = []\n for t in raw_transformers:\n arguments = t.split('/')\n name = arguments[0]\n if name == '':\n raise Exception(EMPTY_NAME_ERR)\n\n kwargs = {}\n if len(arguments) > 1:\n for a in arguments[1:]:\n splited = a.split('=')\n var = splited[0]\n val = splited[1] if len(splited) > 1 else None\n if var == '':\n raise Exception(EMPTY_NAME_ERR)\n\n kwargs[var] = val\n\n transformers.append((name, kwargs))\n\n return transformers", "def _gen_ids(argnames, argvalues, idgen):\n if not callable(idgen):\n # idgen is a new-style string formatting template\n if not isinstance(idgen, string_types):\n raise TypeError(\"idgen should be a callable or a string, found: %r\" % idgen)\n\n _formatter = idgen\n\n def gen_id_using_str_formatter(**params):\n try:\n # format using the idgen template\n return _formatter.format(**params)\n except Exception as e:\n raise InvalidIdTemplateException(_formatter, params, e)\n\n idgen = gen_id_using_str_formatter\n\n if len(argnames) > 1:\n ids = [idgen(**{n: v for n, v in zip(argnames, _argvals)}) for _argvals in argvalues]\n else:\n _only_name = argnames[0]\n ids = [idgen(**{_only_name: v}) for v in argvalues]\n\n return ids", "def __init__(self,literal,bindings,facts):\n\n self.literal = literal\n self.bindings = bindings\n self.facts = []\n for fact in facts:\n lit_pred = self.literal.split('(')[0].strip()\n fact_pred = fact.split('(')[0].strip()\n lit_args = self.literal.split('(')[1][:-1].split(',')\n fact_args = fact.split('(')[1][:-1].split(',')\n n = len(lit_args)\n m = len(fact_args)\n if lit_pred == fact_pred and n == m:\n self.facts.append(fact)", "def parse(seq):\n\tdef eval_expr(z, list):\n\t\treturn reduce(lambda s, (f, x): f(s, x), list, z)\n\tunarg = lambda f: lambda x: f(*x)\n\tconst = lambda x: lambda _: x # like ^^^ in Scala\n\n\ttokval = lambda x: x.value # returns the value of a token\n\top = lambda s: a(Token('Op', s)) >> tokval # return the value if token is Op\n\top_ = lambda s: skip(op(s)) # checks if token is Op and ignores it\n\ttoktype = lambda t: some(lambda x: x.type == t) >> tokval # checks type of token\n\tdef lst(h,t):\n\t\treturn [h,] + t\n\tcall = lambda x: Call(x[0], x[1])\n\n\tmakeop = lambda s, f: op(s) >> const(f)\n\n\tadd = makeop('+', Plus)\n\tsub = makeop('-', Minus)\n\tmul = makeop('*', Times)\n\tdiv = makeop('/', Div)\n\n\tdef 
make_const(i):\n\t\treturn const(int(i))\n\n\tnumber = toktype('Number') >> Const\n\n\tmul_op = mul | div\n\tadd_op = add | sub\n\n\tfactor = with_forward_decls(lambda:\n\t\tnumber | op_('(') + exp + op_(')') | call)\n\tterm = factor + many(mul_op + factor) >> unarg(eval_expr)\n\texp = term + many(add_op + term) >> unarg(eval_expr)\n\texp_lst = with_forward_decls(lambda:\n\t\texp + many(op_(',') + exp) >> unarg(lst))\n\tcall = toktype('Name') + op_('(') + exp_lst + op_(')') >> call\n\n\treturn exp.parse(seq)", "def _represent_args(*args, **kwargs):\n argument_strings = [repr(a) for a in args]\n keyword_strings = [\"=\".join((k, repr(v))) for k, v in kwargs.items()]\n return \", \".join(argument_strings + keyword_strings)", "def expression_maker(ex_names, stat_type):\n if len(ex_names) == 1:\n expression = part_expression(ex_names[0], stat_type)\n else:\n current_part = ex_names.pop(-1)\n expression = (expression_maker(ex_names, stat_type) + ','\n + part_expresion(current_part, stat_type))\n\n return expression", "def test_dynamic_variable_generation_surprising():\n a = Step('a')\n b= Step(a, 1, 2)\n res = do_eval(b, a=\"adios\", adios=op.add)\n assert res(1, 2) == 3", "def dynExpression(*args, creation: bool=True, name: Union[AnyStr, bool]=\"\", runtime: bool=True,\n runtimeAfterDynamics: bool=True, runtimeBeforeDynamics: bool=True, string:\n AnyStr=\"\", q=True, query=True, e=True, edit=True, **kwargs)->Union[AnyStr,\n Any]:\n pass", "def allargs(symbol, fact, expr):\n return And(*[fact.subs(symbol, arg) for arg in expr.args])", "def init_from_arguments(\n cls,\n args: List[str],\n random_state: RandomState,\n pi: Optional[Policy]\n ) -> Tuple[List[Agent], List[str]]:\n\n parsed_args, unparsed_args = parse_arguments(cls, args)\n\n # grab and delete epsilons from parsed arguments\n epsilons = parsed_args.epsilon\n del parsed_args.epsilon\n\n # initialize agents\n agents = [\n EpsilonGreedy(\n name=f'epsilon-greedy (e={epsilon:0.2f})',\n random_state=random_state,\n epsilon=epsilon,\n **vars(parsed_args)\n )\n for epsilon in epsilons\n ]\n\n return agents, unparsed_args", "def eval(self, *args, **kwargs):\n raise NotImplementedError", "def my_evalf(expr, chop=False):\r\n if type(expr) == list:\r\n try:\r\n return [x.evalf(chop=chop) for x in expr]\r\n except:\r\n return expr\r\n try:\r\n return expr.evalf(chop=chop)\r\n except:\r\n return expr", "def split_arguments(s, windows=IS_WINDOWS):\n # from http://stackoverflow.com/a/35900070\n if windows:\n RE_CMD_LEX = r'''\"((?:\"\"|\\\\[\"\\\\]|[^\"])*)\"?()|(\\\\\\\\(?=\\\\*\")|\\\\\")'\n r'|(&&?|\\|\\|?|\\d?>|[<])|([^\\s\"&|<>]+)|(\\s+)|(.)'''\n else:\n RE_CMD_LEX = r'''\"((?:\\\\[\"\\\\]|[^\"])*)\"|'([^']*)'|(\\\\.)|'\n r'(&&?|\\|\\|?|\\d?\\>|[<])|([^\\s'\"\\\\&|<>]+)|(\\s+)|(.)'''\n\n args = []\n accu = None # collects pieces of one arg\n for qs, qss, esc, pipe, word, white, fail in re.findall(RE_CMD_LEX, s):\n if word:\n pass # most frequent\n elif esc:\n word = esc[1]\n elif white or pipe:\n if accu is not None:\n args.append(accu)\n if pipe:\n args.append(pipe)\n accu = None\n continue\n elif fail:\n raise ValueError(\"invalid or incomplete shell string\")\n elif qs:\n word = qs.replace('\\\\\"', '\"').replace('\\\\\\\\', '\\\\')\n if windows:\n word = word.replace('\"\"', '\"')\n else:\n word = qss # may be even empty; must be last\n\n accu = (accu or '') + word\n\n if accu is not None:\n args.append(accu)\n\n return args", "def process_attrs(expr, get, key, val):\n for domain in safe_eval(expr).values():\n if not isinstance(domain, 
list):\n continue\n for arg in domain:\n if isinstance(arg, (tuple, list)):\n process_expr(str(arg[0]), get, key, expr)", "def argument_list_quote(arguments):\n args = []\n for arg in arguments:\n args.append(argument_quote(arg))\n return '\"%s\"' % ' '.join(args)", "def string_to_class(names):\n return [eval(name) for name in names]", "def build(*components_with_tokens):\n\n res = {}\n for component_with_token in components_with_tokens:\n component, sep, build_token = component_with_token.partition(\"@\")\n assert sep == \"@\"\n # TODO: put your build logic in here!\n res[component] = \"example\"\n return res", "def _expand_args(arglst):\n if not isinstance(arglst, Iterable):\n arglst = [arglst]\n elif isinstance(arglst, dict):\n arglst = [arglst]\n elif ('theano' in sys.modules\n and isinstance(arglst, _gettheano().graph.basic.Variable)):\n arglst = [arglst]\n elif isinstance(arglst, cf.TerminatingTypes):\n arglst = [arglst]\n for arg in arglst:\n if 'theano' in sys.modules and isinstance(arg, _gettheano().graph.basic.Variable):\n # Theano variables aren't iterable\n yield arg\n elif isinstance(arg, cf.TerminatingTypes):\n yield arg\n elif isinstance(arg, slice):\n yield arg.start\n yield arg.stop\n yield arg.step\n elif isinstance(arg, dict):\n for key in arg.keys():\n yield key\n for val in arg.values():\n #yield from nwlst.extend(_expand_args(val))\n yield from _expand_args(val)\n elif isinstance(arg, np.ndarray):\n if arg.ndim == 0:\n yield arg # can't iterate over a 0-dim array\n else:\n yield from _expand_args(arg)\n elif isinstance(arg, Iterable):\n try:\n yield from _expand_args(arg)\n except TypeError:\n # Pint objects with scalars report 'iterable' but then fail\n # on __iter__. Might be the case with other objects as well.\n # For Pint, see https://github.com/hgrecco/pint-pandas/issues/33#issuecomment-647198749\n # Should be fixed by this currently open PR https://github.com/hgrecco/pint/pull/1125\n yield arg\n else:\n yield arg", "def parse(string):\r\n \r\n global local_vars\r\n # print \"parse(\"+string+\")\"\r\n\r\n # variables\r\n if string in local_vars: # e.g. 'y'\r\n return string\r\n elif string == 'it':\r\n # print 'it: ',references[0]\r\n return g.it\r\n\r\n # operators\r\n elif string.find('\\gamma') == 0:\r\n return gamma(string[7],string[9:-1])\r\n elif string.find('\\iota') == 0:\r\n # treating iota as gamma for now\r\n return iota(string[6],string[8:-1])\r\n\r\n # function application\r\n else:\r\n fun = string.split( '(' , 1)[0]\r\n arg = parse(string.split( '(' , 1)[1][:-1])\r\n exec(fun+'(arg)')", "def process_list_arg(arg):\n if isinstance(arg, list):\n return arg\n elif isinstance(arg, basestring):\n args = []\n for part in arg.split(\",\"):\n args.append(part.strip())\n return args", "def interpret_instruction(inst, variables=None):\n if isinstance(inst, list):\n res = [interpret_instruction(_, variables) for _ in inst]\n if any(res):\n return [_ for _ in res if _ is not None]\n return None\n if isinstance(inst, tuple):\n if len(inst) != 2 or inst[1] is None:\n raise ValueError( # pragma: no cover\n f\"Unable to interpret '{inst}'.\")\n return (inst[0], interpret_instruction(inst[1], variables))\n if isinstance(inst, dict):\n return inst\n if isinstance(inst, (int, float)):\n return inst\n\n inst = inst.replace(\"\\n\", \" \")\n exp = re.compile(\"^ *if +(.*) +then +(.*)( +else +(.*))? 
+fi *$\")\n find = exp.search(inst)\n if find:\n gr = find.groups()\n try:\n e = evaluate_condition(gr[0], variables)\n except SyntaxError:\n # We assume the condition is a linux condition.\n return inst\n g = gr[1] if e else gr[3]\n return None if g is None else interpret_instruction(g, variables)\n\n if inst.startswith('--'):\n # one format like --CMD=...; --NAME==...;\n exp = re.compile(\"--([a-zA-Z]+?)=(.+?);;\")\n find = exp.findall(inst)\n if find:\n inst = {k.strip(): v.strip() for k, v in find}\n inst = {k: (None if not v or len(v) == 0 else v)\n for k, v in inst.items()}\n return inst\n return inst\n return inst", "def parse(args: list, keyword_set: set) -> dict:\n parsed_dict = {'': []}\n while args:\n keyword = get_keyword(arg=args[0], keyword_set=keyword_set)\n\n if keyword is not None:\n args.pop(0)\n keyword_name = keyword.keyword_name\n\n if keyword_name in parsed_dict:\n raise necrobot.exception.DoubledArgException(keyword=keyword.keyword)\n\n if keyword.param_for is not None:\n parsed_dict[keyword_name] = [keyword.keyword]\n else:\n parsed_dict[keyword_name] = []\n num_args_pulled = 0\n while num_args_pulled < keyword.num_args:\n if not args:\n raise necrobot.exception.NumParametersException(\n keyword=keyword,\n num_expected=keyword.num_args,\n num_given=num_args_pulled\n )\n else:\n num_args_pulled += 1\n parsed_dict[keyword_name].append(args[0])\n args.pop(0)\n else:\n parsed_dict[''].append(args[0])\n args.pop(0)\n\n return parsed_dict", "def _parse_value(\n value_expr: str, target_expr: str, ref_parts: List[str],\n a_type: mapry.Type, registry_exprs: Mapping[mapry.Class, str],\n auto_id: mapry.py.generate.AutoID, py: mapry.Py) -> str:\n # pylint: disable=too-many-branches\n if isinstance(a_type, mapry.Boolean):\n body = _parse_boolean(\n value_expr=value_expr,\n target_expr=target_expr,\n ref_parts=ref_parts,\n auto_id=auto_id)\n\n elif isinstance(a_type, mapry.Integer):\n body = _parse_integer(\n value_expr=value_expr,\n target_expr=target_expr,\n ref_parts=ref_parts,\n a_type=a_type,\n auto_id=auto_id)\n\n elif isinstance(a_type, mapry.Float):\n body = _parse_float(\n value_expr=value_expr,\n target_expr=target_expr,\n ref_parts=ref_parts,\n a_type=a_type,\n auto_id=auto_id)\n\n elif isinstance(a_type, mapry.String):\n body = _parse_string(\n value_expr=value_expr,\n target_expr=target_expr,\n ref_parts=ref_parts,\n a_type=a_type,\n auto_id=auto_id)\n\n elif isinstance(a_type, mapry.Path):\n body = _parse_path(\n value_expr=value_expr,\n target_expr=target_expr,\n ref_parts=ref_parts,\n a_type=a_type,\n auto_id=auto_id,\n py=py)\n\n elif isinstance(a_type, mapry.Date):\n body = _parse_date(\n value_expr=value_expr,\n target_expr=target_expr,\n ref_parts=ref_parts,\n a_type=a_type,\n auto_id=auto_id)\n\n elif isinstance(a_type, mapry.Datetime):\n body = _parse_date_time(\n value_expr=value_expr,\n target_expr=target_expr,\n ref_parts=ref_parts,\n a_type=a_type,\n auto_id=auto_id)\n\n elif isinstance(a_type, mapry.Time):\n body = _parse_time(\n value_expr=value_expr,\n target_expr=target_expr,\n ref_parts=ref_parts,\n a_type=a_type,\n auto_id=auto_id)\n\n elif isinstance(a_type, mapry.TimeZone):\n body = _parse_time_zone(\n value_expr=value_expr,\n target_expr=target_expr,\n ref_parts=ref_parts,\n a_type=a_type,\n auto_id=auto_id,\n py=py)\n\n elif isinstance(a_type, mapry.Duration):\n body = _parse_duration(\n value_expr=value_expr,\n target_expr=target_expr,\n ref_parts=ref_parts,\n a_type=a_type,\n auto_id=auto_id)\n\n elif isinstance(a_type, mapry.Array):\n body = 
_parse_array(\n value_expr=value_expr,\n target_expr=target_expr,\n ref_parts=ref_parts,\n a_type=a_type,\n registry_exprs=registry_exprs,\n auto_id=auto_id,\n py=py)\n\n elif isinstance(a_type, mapry.Map):\n body = _parse_map(\n value_expr=value_expr,\n target_expr=target_expr,\n ref_parts=ref_parts,\n a_type=a_type,\n registry_exprs=registry_exprs,\n auto_id=auto_id,\n py=py)\n\n elif isinstance(a_type, mapry.Class):\n body = _parse_instance_reference(\n value_expr=value_expr,\n target_expr=target_expr,\n ref_parts=ref_parts,\n a_type=a_type,\n registry_expr=registry_exprs[a_type],\n auto_id=auto_id)\n\n elif isinstance(a_type, mapry.Embed):\n body = _parse_embed(\n target_expr=target_expr,\n value_expr=value_expr,\n ref_parts=ref_parts,\n a_type=a_type,\n registry_exprs=registry_exprs,\n auto_id=auto_id,\n py=py)\n\n else:\n raise NotImplementedError(\n \"Unhandled parsing of type: {}\".format(a_type))\n\n return body", "def group(*args:List[str]) -> str:\n return f'(?:{\"\".join(args)})'", "def parse(args, query):\n\n global query_type\n\n # Deal first with requests for definition or pronunciation\n # 1. Make the code easier to read\n first_word = args[0]\n second_word = args[1] if len(args) > 1 else \"\"\n third_word = args[2] if len(args) > 2 else \"\"\n fourth_word = args[3] if len(args) > 3 else \"\"\n # we use the teranary operator (this if ____ else that) to avoid an IndexError\n # IndexError would be raised if we tried to access the second element (args[1])\n # in a list which contained only one item (eg args == [\"lonely\"])\n # the teranary operator (in most languages it looks like \"____ ? this : that\")\n # returns \"this\" when the if is true and \"that\" when the if is false\n # meaning, if len(args) is NOT greater than 1, second_word == \"\"\n\n # 2. Check for keywords in the list of arguments\n # Example: nostrum defined\n # Example: pronunciation of otolaryngology\n if first_word == \"define\":\n # e.g. if the first word is \"define\" we'll add the second word to the query\n query = {\"sp\": second_word, \"md\": \"d\", \"max\": \"1\", \"qe\": \"sp\", \"ipa\": \"1\"}\n # the query is a dictionary of GET parameters for the http request, eg\n # https://api.datamuse.com/words?max=1&sp=SECOND_WORD_HERE&qe=sp&md=d&ipa=1\n elif second_word == \"defined\" or second_word == \"definition\":\n query = {\"sp\": first_word, \"md\": \"d\", \"max\": \"1\", \"qe\": \"sp\", \"ipa\": \"1\"}\n # this one uses string interpolation (the f\"\" stuff)\n elif f\"{second_word} {third_word}\" == \"means what\":\n query = {\"sp\": first_word, \"md\": \"d\", \"max\": \"1\", \"qe\": \"sp\", \"ipa\": \"1\"}\n elif f\"{second_word} {third_word} {fourth_word}\" == \"is said how\":\n query = {\"sp\": first_word, \"md\": \"r\", \"max\": \"1\", \"qe\": \"sp\", \"ipa\": \"1\"}\n # this one uses regular expressions -- i.e. 
if the second_word is \"of\" or \"for\"\n elif first_word == \"definition\" and re.match(r'(of)|(for)',second_word):\n query = {\"sp\": third_word, \"md\": \"d\", \"max\": \"1\", \"qe\": \"sp\", \"ipa\": \"1\"}\n # the is_pronounced function returns true if first_word is a (mis)spelling of pronounced\n elif re.match(r'(of)|(for)',second_word) and is_pronounced(first_word):\n query = {\"sp\": third_word, \"md\": \"r\", \"max\": \"1\", \"qe\": \"sp\", \"ipa\": \"1\"}\n # the ordering in the above list is not entirely random\n # since an if-elif-else statement won't keep evaluating after it finds a match\n # it makes sense to put the most computationally complex clauses at the end\n # >>> import timeit\n # >>> timeit.timeit('from word_helpers import is_pronounced; is_pronounced(\"pronounced\")', number=10000)\n # 0.022870146989589557\n # >>> timeit.timeit('args = [\"defined\"]; args[0] == \"defined\"', number=10000)\n # 0.002359684993280098\n # it takes 2 milliseconds to compare a string in a list 10,000 times\n # -- versus 2 centiseconds to run is_pronounced 10,000 times\n # (on my Intel Core i5 2.67GHz CPU -- obviously speed depends on the processor)\n # it's also worth noting that readability counts more than speed optimization (most of the time!)\n\n # Quick way to check if any of the above if statements matched\n if \"sp\" in query:\n # if so, we are done in this function\n if query[\"md\"] == \"r\": query_type = \"PRO\"\n if query[\"md\"] == \"d\": query_type = \"DEF\"\n return query\n\n # these will be useful later\n STOP_WORDS = (\"and\", \"meaning\", \"means\", \"max\", \"about\", \"which\", \"that\")\n\n # Parse more complicated requests for synonyms, etc\n # 0 is false in python, so this loop will run until we've removed all the args\n while len(args):\n # we must reset these vars each time the loop starts\n # in case we've deleted items from the args list\n first_word = args[0]\n second_word = args[1] if len(args) > 1 else \"\"\n third_word = args[2] if len(args) > 2 else \"\"\n # we use the teranary operator (this if ____ else that) to avoid an IndexError\n # IndexError would be raised if we tried to access the second element (args[1])\n # in a list which contained only one item (eg args == [\"lonely\"])\n # the teranary operator (in most languages it looks like \"____ ? this : that\")\n # returns \"this\" when the if is true and \"that\" when the if is false\n # meaning, if len(args) is NOT greater than 1, second_word == \"\"\n\n # Disambiguate homonym requests from spelling correction requests\n # Example: sounding like tung\n # Example: sounds like doe but spelled differently\n if re.match(r'sound((s)|(ing)) like',f\"{first_word} {second_word}\"):\n\n # again, use len(args) to avoid an IndexError\n if len(args) >= 6 and \\\n re.match(r'((but)|(except)) spelled different(ly)?',f\"{args[3]} {args[4]} {args[5]}\"):\n # but instead of teranary operator,\n # use \"short circuit logic\" -- when python sees \"if __A__ and __B__ \",\n # it knows that if A is false, the whole thing will be false\n # (you can't have \"ice cream and potatoes\" for dinner if you don't have ice cream)\n # and it won't waste time evaluating B, so re.match won't run and args[4]\n # won't be accessed and no IndexError will be raised, yay!\n # regex explained: ? means the prior thing matched zero or one times\n # different(ly)? 
matches \"different\" and \"differently\"\n query[\"rel_hom\"] = third_word\n # now, delete 6 items from args, starting at item 0\n del args[0:6]\n else:\n query[\"sl\"] = third_word\n del args[0:3]\n\n # Example: spelled like 'cens?r'\n elif re.match(r'spell((ed)|(ing)) like',f\"{first_word} {second_word}\"):\n # two stars (**) means \"unpack\" a dictionary\n # just like unpacking a suitcase, we've dumped the old contents of query\n # into a new dictionary (which we are saving with the same variable name!)\n query = {**query,\"sp\": third_word}\n # query[\"sp\"] = third_word also works fine\n # just showing off how to combine two dictionaries :)\n del args[0:3]\n\n # Example: rhymes with culminate\n elif len(args) > 2 and second_word == \"with\" and is_rhymes(first_word):\n query[\"rel_rhy\"] = third_word\n del args[0:3]\n\n # Example: almost rhymes with culminate\n elif len(args) > 3 and \\\n f\"{first_word} {third_word}\" == \"almost with\" and \\\n is_rhymes(second_word):\n query[\"rel_nry\"] = args[3] # fourth_word\n del args[0:4]\n\n # Example: comes after sea\n elif f\"{first_word} {second_word}\" == \"comes after\":\n query[\"lc\"] = third_word\n del args[0:3]\n elif first_word == \"follows\":\n query[\"lc\"] = second_word\n del args[0:2]\n elif f\"{first_word} {second_word}\" == \"comes before\":\n query[\"rc\"] = third_word\n del args[0:3]\n elif first_word == \"preceeds\":\n query[\"rc\"] = second_word\n del args[0:2]\n\n # Example: describes paint\n elif first_word == \"describes\":\n query[\"rel_jjb\"] = second_word\n del args[0:2]\n\n # Example: associated with feet\n elif f\"{first_word} {second_word}\" == \"associated with\" or \\\n f\"{first_word} {second_word}\" == \"triggered by\":\n query[\"rel_trg\"] = third_word\n del args[0:3]\n\n # Example: meaning feeling tired\n elif first_word in [\"means\",\"meaning\",\"like\"]:\n # get rid of first_word\n del args[0]\n # now short circuit logic again, plus using the tuple from ealier\n # b/c if we have \"meaning deer and sounds like roe\" we don't want\n # query[\"ml\"] == \"deer and sounds like roe\" -- it should be just \"deer\"\n while len(args) and args[0] not in STOP_WORDS:\n # teranary operator prevents KeyError if \"ml\" not already in query dictionary\n query[\"ml\"] = f\"{query['ml']} {args[0]}\" if \"ml\" in query else args[0]\n del args[0]\n # an example with the previous code to make things clearer\n # say args == [\"means\", \"egg\", \"beater\", \"and\", \"max\", \"35\"]\n # first_word IS in [\"means\",\"meaning\",\"like\"]\n # del first_word, args is now [\"egg\", \"beater\", \"and\", \"max\", \"35\"]\n # len(args) == 5, args[0] is NOT in STOP_WORDS\n # \"ml\" is NOT in query, so teranary returns args[0] (\"egg\")\n # args[0] is copied to query[\"ml\"] (query is now {ml: \"egg\"})\n # del args[0], args is now [\"beater\", \"and\", \"max\", \"35\"]\n # return to top of while loop, len(args) == 4, args[0] is NOT in STOP_WORDS\n # \"ml\" IS in query, so teranary returns f\"{query['ml']} {args[0]}\" (\"egg beater\") \n # f\"{query['ml']} {args[0]}\" is copied to query[\"ml\"]\n # (query is now {ml: \"egg beater\"})\n # del args[0], args is now [\"and\", \"max\", \"35\"]\n # return to top of while loop, len(args) == 3,\n # args[0] IS in STOP_WORDS (args[0] == \"and\")\n # DO NOT enter the while loop, continue past this code block\n\n # Discover the topic of our query\n elif first_word == \"about\":\n del args[0]\n count = 0\n # Datamuse allows a max of five topic words\n while len(args) and args[0] not in STOP_WORDS 
and count <= 5:\n query[\"topics\"] = f\"{query['topics']} {args[0]}\" if \"topics\" in query else args[0]\n del args[0]\n # count += 1 is the same as count = count + 1\n count += 1\n\n # How many results to return (max 1000)\n elif first_word in [\"max\", \"maximum\", \"only\"]:\n user_max = convert_num(second_word)\n if user_max and int(user_max) <= 1000:\n query[\"max\"] = user_max\n del args[0:2]\n\n # Remove filler words if they weren't parsed out above\n elif first_word in [\"that\",\"which\",\"and\",\"like\",\"is\"]:\n del args[0]\n\n # Add anything not otherwise parsable to the ml parameter\n else:\n query[\"ml\"] = f\"{query['ml']} {first_word}\" if \"ml\" in query else first_word\n del args[0]\n\n # this is the bottom of that massive while loop\n # if args is not empty by now, we'll start over from the top ^\n\n return query\n # and this is the end of the \"def parse(args, query)\" function\n # whew!", "def _quote_arguments(args):\n return map(lambda x: '\"{}\"'.format(x) if ' ' in x else '{}'.format(x), args)", "def evalDeferred(*args, evaluateNext: bool=True, list: bool=True, lowPriority: bool=True,\n lowestPriority: bool=True, **kwargs)->List[AnyStr]:\n pass", "def opsplit(expstr):\n\n #ops are the one char operators (sorted on precidence)\n ops = expr.getOps()\n #Remove outer parentesis if we have them\n if expstr[0] == '(' and expstr[-1] == ')' and balanced(expstr[1:-1]):\n expstr = expstr[1:-1]\n #Add a '0' to the beginning of the string if we start with an operator\n if expstr[0] in ops:\n expstr = '0'+expstr\n for op in ops:\n pc = 0\n cc = len(expstr)-1\n revexpstr = list(expstr)\n revexpstr.reverse()\n #Search for the operator backwards (to preserve operator presidence)\n for c in revexpstr:\n if c == '(':\n pc += 1\n elif c == ')':\n pc -= 1\n if c == op and pc == 0:\n #Build the tree recursively\n return [op,opsplit(expstr[:cc]),opsplit(expstr[cc+1:])]\n cc -=1\n #if we find something that looks like a function, parse it separately \n if funcpattern(expstr):\n fnamestr = funcname(expstr)\n fargs = funcargs(expstr)\n farglist = [opsplit(arg) for arg in fargs]\n return [fnamestr]+farglist\n return expstr", "def get_primitives(base):\n\n operands = []\n operators = []\n for nparams, s in enumerate(base):\n s = s.replace('%', '%%').split()\n for s in (x.replace('_', ' ') for x in s):\n if nparams and '$' not in s:\n assert nparams in (1, 2)\n s = '%s%s$' % ('$' if nparams == 2 else '', s)\n assert nparams == s.count('$'), (nparams, s)\n s = s.replace('$', ' %s ').strip()\n\n # Normalize the spacing\n s = s.replace(' ,', ',')\n s = s.replace(' . 
', '.')\n s = s.replace(' [ ', '[').replace(' ]', ']')\n s = s.replace(' ( ', '(').replace(' )', ')')\n if nparams == 1:\n s = s.replace('+ ', '+')\n s = s.replace('- ', '-')\n s = s.replace('~ ', '~')\n\n if nparams:\n operators.append((s, nparams))\n else:\n operands.append(s)\n return operators, operands", "def extract_arguments(start, string):\n\n arguments = []\n closures = {\n \"<\": 0,\n \"(\": 0\n }\n current_position = start\n argument_start_pos = current_position + 1\n\n # Search for final parenthesis\n while current_position < len(string):\n if string[current_position] == \"(\":\n closures[\"(\"] += 1\n elif string[current_position] == \")\":\n closures[\"(\"] -= 1\n elif string[current_position] == \"<\":\n closures[\"<\"] += 1\n elif string[current_position] == \">\" and string[current_position - 1] != \"-\" and closures[\"<\"] > 0:\n closures[\"<\"] -= 1\n\n # Finished all arguments\n if closures[\"(\"] == 0 and closures[\"<\"] == 0:\n # Add final argument\n arguments.append({\"start\": argument_start_pos, \"end\": current_position})\n break\n\n # Finished current argument\n if closures[\"(\"] == 1 and closures[\"<\"] == 0 and string[current_position] == \",\":\n arguments.append({\"start\": argument_start_pos, \"end\": current_position})\n argument_start_pos = current_position + 1\n\n current_position += 1\n\n return arguments", "def handle_arguments(self, string, root, opening, closing):\n\n\t\t# The actual argument string (ignore whitespace)\n\t\targs = string[opening + 1 : closing].replace(\" \", \"\")\n\n\t\t# The argument sequence must be at the start of the phrase\n\t\t# and must match the allowed argument regular expression\n\t\tif opening > 0 or not self.arguments.match(args):\n\n\t\t\tif opening == 0:\n\t\t\t\traise errors.ParseError(\"Invalid argument sequence!\")\n\n\t\t\t# If escape_meta does indeed escape a character and removes\n\t\t\t# a backward slash, the positions 'opening' and 'closing' are no\n\t\t\t# longer valid. 
escape_meta does a search for the next meta\n\t\t\t# character though, which is then the closing parantheses,\n\t\t\t# so we can use its index value (in the now escaped string)\n\t\t\tstring, meta = self.escape_meta(string, opening)\n\t\t\tstring, meta = self.escape_meta(string, meta.start())\n\n\t\t\treturn string, root, meta\n\n\t\tif \"!\" in args:\n\t\t\troot.override = True\n\t\t\targs = args.replace(\"!\", \"\")\n\n\t\tif \"+\" in args:\n\t\t\troot.increment = True\n\t\t\targs = args.replace(\"+\", \"\")\n\n\t\troot.arguments = [int(i) for i in args.split(\",\") if i]\n\n\t\t# Remove the argument string including parantheses\n\t\tstring = string[closing + 1:]\n\n\t\tmeta = self.meta.search(string)\n\n\t\treturn string, root, meta", "def _unroll(self, gate_definition, reg_list, param_list=None): # pylint: disable-msg=invalid-name, line-too-long\n ast_binder = ASTBinder(gate_definition, reg_list, param_list)\n for gate_op in gate_definition.get('gate_ops_list'):\n the_op = gate_op.get('op')\n the_reg_list = ast_binder.bind_regs(gate_op.get('op_reg_list'))\n the_param_list = None\n gate_op_param_list = gate_op.get('op_param_list')\n if gate_op_param_list: # Have to subst in the full param for symbolic name.\n the_param_list = []\n param_symbolic_names = ast_binder.bind_params(param_list)\n # DEBUG\n # print('gate_op_param_list: {}'.format(str(gate_op_param_list)))\n # print(param_symbolic_names)\n # EMD=DEBUG\n for i in range(0, len(gate_op_param_list)):\n if param_symbolic_names:\n the_param_list.append(gate_op_param_list[i].replace(param_symbolic_names[i],\n param_list[i]))\n else:\n if gate_op_param_list:\n the_param_list.append(gate_op_param_list[i])\n # DEBUG\n # print(\"******the_op {} the_reg_list {} the_param_list {}\".format(the_op, the_reg_list, the_param_list)) # pylint: disable-msg=line-too-long\n # END-DEBUG\n if not self._op_easy(the_op,\n the_reg_list,\n param_list=the_param_list if the_param_list else None):\n self._op_search(the_op,\n the_reg_list,\n param_list=the_param_list if the_param_list else None)", "def parse_string(\n raw_string: Text,\n variables_mapping: VariablesMapping,\n functions_mapping: FunctionsMapping,\n) -> Any:\n try:\n match_start_position = raw_string.index(\"$\", 0)\n parsed_string = raw_string[0:match_start_position]\n except ValueError:\n parsed_string = raw_string\n return parsed_string\n\n while match_start_position < len(raw_string):\n\n # Notice: notation priority\n # $$ > ${func($a, $b)} > $var\n\n # search $$\n dollar_match = dolloar_regex_compile.match(raw_string, match_start_position)\n if dollar_match:\n match_start_position = dollar_match.end()\n parsed_string += \"$\"\n continue\n\n # search function like ${func($a, $b)}\n func_match = function_regex_compile.match(raw_string, match_start_position)\n if func_match:\n func_name = func_match.group(1)\n func = get_mapping_function(func_name, functions_mapping)\n\n func_params_str = func_match.group(2)\n function_meta = parse_function_params(func_params_str)\n args = function_meta[\"args\"]\n kwargs = function_meta[\"kwargs\"]\n parsed_args = parse_data(args, variables_mapping, functions_mapping)\n parsed_kwargs = parse_data(kwargs, variables_mapping, functions_mapping)\n\n try:\n func_eval_value = func(*parsed_args, **parsed_kwargs)\n except Exception as ex:\n logger.error(\n f\"call function error:\\n\"\n f\"func_name: {func_name}\\n\"\n f\"args: {parsed_args}\\n\"\n f\"kwargs: {parsed_kwargs}\\n\"\n f\"{type(ex).__name__}: {ex}\"\n )\n raise\n\n func_raw_str = \"${\" + func_name + 
f\"({func_params_str})\" + \"}\"\n if func_raw_str == raw_string:\n # raw_string is a function, e.g. \"${add_one(3)}\", return its eval value directly\n return func_eval_value\n\n # raw_string contains one or many functions, e.g. \"abc${add_one(3)}def\"\n parsed_string += str(func_eval_value)\n match_start_position = func_match.end()\n continue\n\n # search variable like ${var} or $var\n var_match = variable_regex_compile.match(raw_string, match_start_position)\n if var_match:\n var_name = var_match.group(1) or var_match.group(2)\n var_value = get_mapping_variable(var_name, variables_mapping)\n\n if f\"${var_name}\" == raw_string or \"${\" + var_name + \"}\" == raw_string:\n # raw_string is a variable, $var or ${var}, return its value directly\n return var_value\n\n # raw_string contains one or many variables, e.g. \"abc${var}def\"\n parsed_string += str(var_value)\n match_start_position = var_match.end()\n continue\n\n curr_position = match_start_position\n try:\n # find next $ location\n match_start_position = raw_string.index(\"$\", curr_position + 1)\n remain_string = raw_string[curr_position:match_start_position]\n except ValueError:\n remain_string = raw_string[curr_position:]\n # break while loop\n match_start_position = len(raw_string)\n\n parsed_string += remain_string\n\n return parsed_string", "def equation_processor(eq: str, xmin: str, xmax: str, xstep: str):\n # Checks if there is variables x and t present\n if 'x' in eq and 't' in eq:\n return \"Cannot mix x and t variables\"\n\n # Checks if the domain is unusable\n elif xmin >= xmax:\n return \"Invalid Domain\"\n\n # Checks if xstep is usable\n try:\n eval(xstep)\n except NameError:\n return \"Invalid xstep\"\n finally:\n str(xstep)\n\n input_group = [eq.lower(), xmin, xmax, xstep]\n mult_err_code = re.compile(r' \\w\\(|\\)\\w|\\w[a-z]|[a-z]\\d')\n new_arr = []\n # for each of the passed parameters...\n for entry in input_group:\n # Run through the expression converter\n entry = expression_converter(entry)\n # And add the converted expressions to the array\n new_arr.append(entry)\n\n # Convert all parameters to a data type the grapher can use\n eq = new_arr[0].replace('x', '({x})').replace('t', '({x})')\n xmin = float(eval(new_arr[1]))\n xmax = float(eval(new_arr[2]))\n xstep = float(eval(new_arr[3]))\n\n x_arr = []\n y_arr = []\n d = abs(xmax / 1000)\n # Generate data points\n for xvar in arange(xmin, xmax, d):\n # Plug a value for x into the equation\n try:\n yvar = eval(eq.format(x=xvar))\n except TypeError:\n # If there is an issue from having incorrectly formatted eq (fixed by the expr converter)\n return \"No operator between characters\"\n except ValueError:\n # If the xmin or xmax are invalid for a function\n return \"Invalid Domain\"\n # Add the x and y values to the array's for pandas\n x_arr.append(xvar)\n y_arr.append(yvar)\n\n # Makes the tic marks on the graph\n xtic = arange(xmin, xmax+xstep, xstep)\n # Pushes the array of x and y positions into a database and graphs it\n grapher(db=database([x_arr, y_arr]), strng='line', grid=True, xstep=xtic)", "def addDynamic(*args, **kwargs)->AnyStr:\n pass", "def _process_inputs(args, kwargs) -> Any:\n if args and kwargs:\n input_values = (*args, kwargs)\n elif args and not kwargs:\n input_values = args[0] if len(args) == 1 else args\n elif kwargs and not args:\n input_values = kwargs\n else:\n input_values = ()\n\n return input_values", "def extract_argument_types(*args: Sequence[Any]) -> str:\n collapsed_args = []\n\n for arg in args:\n if is_list_like(arg):\n 
collapsed_nested = []\n for nested in arg:\n if is_list_like(nested):\n collapsed_nested.append(f\"({extract_argument_types(nested)})\")\n else:\n collapsed_nested.append(_get_argument_readable_type(nested))\n collapsed_args.append(\",\".join(collapsed_nested))\n else:\n collapsed_args.append(_get_argument_readable_type(arg))\n\n return \",\".join(collapsed_args)", "def Parse(dataproc, gke_cluster, arg_pools, support_shuffle_service=False):\n pools = [\n _GkeNodePoolTargetParser.Parse(dataproc, gke_cluster, arg_pool,\n support_shuffle_service)\n for arg_pool in arg_pools\n ]\n GkeNodePoolTargetsParser._ValidateUniqueNames(pools)\n GkeNodePoolTargetsParser._ValidateRoles(dataproc, pools)\n GkeNodePoolTargetsParser._ValidatePoolsHaveSameLocation(pools)\n GkeNodePoolTargetsParser._ValidateBootDiskKmsKeyPattern(pools)\n return pools", "def expand(*templates: Strings, **kwargs: Strings) -> List[str]:\n formats = flatten(*templates)\n results: List[str] = []\n data: Dict[str, Any] = {}\n\n def _collect(items: List[Tuple[str, Strings]]) -> None:\n if len(items) == 0:\n for template in formats:\n results.append(template.format(**data))\n else:\n name, values = items[0]\n for value in flatten(values):\n data[name] = value\n _collect(items[1:])\n\n _collect(list(kwargs.items()))\n\n return results", "def arg_parse_list(text, j):\n\n depth = 0\n loc2 = j\n arglist = []\n prev_start = j\n while 1:\n if text[loc2] == \"(\":\n if depth == 0:\n prev_start = loc2 + 1\n depth = depth + 1\n\n elif text[loc2] == \")\":\n depth = depth - 1\n if depth == 0:\n arglist.append(text[prev_start:loc2].strip())\n break\n\n elif text[loc2] == \",\":\n if depth == 1:\n arglist.append(text[prev_start:loc2].strip())\n prev_start = loc2 + 1\n elif text[loc2] == \"{\":\n depth = depth + 1\n elif text[loc2] == \"}\":\n depth = depth - 1\n loc2 = loc2 + 1\n return arglist", "def parseArgs(args):\n parsed = []\n for arg in args:\n print arg\n arg = arg.strip()\n interpretation = None\n try:\n interpretation = float(arg)\n if string.find(arg, \".\") == -1:\n interpretation = int(interpretation)\n except:\n # Oh - it was a string.\n interpretation = arg\n pass\n parsed.append(interpretation)\n return parsed", "def parse_generate_arguments(arguments):\n return_value = {}\n for key in arguments:\n return_value[key] = CONFIG_KEY_PARSER[key](arguments[key])\n\n return return_value", "def pythonize_args(contents):\n return contents\n \n contents = contents.replace(\"static\", \"\")\n contents = contents.replace(\"virtual void\", \"\")\n contents = contents.replace(\"virtual\", \"\")\n contents = contents.replace(\"void*\", \"int\")\n contents = contents.replace(\"void\", \"\")\n \n contents = contents.replace(\"off_t\", \"long\")\n contents = contents.replace(\"size_t\", \"long\")\n contents = contents.replace(\"*\", \"\")\n contents = contents.replace(\"&amp;\", \"\")\n contents = contents.replace(\"&\", \"\")\n contents = contents.replace(\"char\", \"string\") \n contents = contents.replace(\"wxChar\", \"string\") \n contents = contents.replace(\"wxCoord\", \"int\")\n contents = contents.replace(\"<A HREF=\\\"wx_wxstring.html#wxstring\\\">wxString</A>\", \"string\")\n \n return pythonize_text(contents)", "def expand(self, s, lvars, within_list):\n\n if is_String(s):\n try:\n s0, s1 = s[:2]\n except (IndexError, ValueError):\n self.append(s)\n return\n if s0 != '$':\n self.append(s)\n return\n if s1 == '$':\n self.append('$')\n elif s1 == '(':\n self.open_strip('$(')\n elif s1 == ')':\n self.close_strip('$)')\n else:\n key = 
s[1:]\n if key[0] == '{' or key.find('.') >= 0:\n if key[0] == '{':\n key = key[1:-1]\n\n # Store for error messages if we fail to expand the\n # value\n old_s = s\n s = None\n if key in lvars:\n s = lvars[key]\n elif key in self.gvars:\n s = self.gvars[key]\n else:\n try:\n s = eval(key, self.gvars, lvars)\n except KeyboardInterrupt:\n raise\n except Exception as e:\n if e.__class__ in AllowableExceptions:\n return\n raise_exception(e, lvars['TARGETS'], old_s)\n\n if s is None and NameError not in AllowableExceptions:\n raise_exception(NameError(), lvars['TARGETS'], old_s)\n elif s is None:\n return\n\n # If the string is already full expanded there's no\n # need to continue recursion.\n if self.expanded(s):\n self.append(s)\n return\n\n # Before re-expanding the result, handle\n # recursive expansion by copying the local\n # variable dictionary and overwriting a null\n # string for the value of the variable name\n # we just expanded.\n lv = lvars.copy()\n var = key.split('.')[0]\n lv[var] = ''\n self.substitute(s, lv, 0)\n self.this_word()\n elif is_Sequence(s):\n for a in s:\n self.substitute(a, lvars, 1)\n self.next_word()\n elif callable(s):\n # SCons has the unusual Null class where any __getattr__ call returns it's self, \n # which does not work the signature module, and the Null class returns an empty\n # string if called on, so we make an exception in this condition for Null class\n # Also allow callables where the only non default valued args match the expected defaults\n # this should also allow functools.partial's to work.\n if isinstance(s, SCons.Util.Null) or {k for k, v in signature(s).parameters.items() if\n k in _callable_args_set or v.default == Parameter.empty} == _callable_args_set:\n\n s = s(target=lvars['TARGETS'],\n source=lvars['SOURCES'],\n env=self.env,\n for_signature=(self.mode != SUBST_CMD))\n else:\n # This probably indicates that it's a callable\n # object that doesn't match our calling arguments\n # (like an Action).\n if self.mode == SUBST_RAW:\n self.append(s)\n return\n s = self.conv(s)\n self.substitute(s, lvars, within_list)\n elif s is None:\n self.this_word()\n else:\n self.append(s)", "def test_callexpression_argument_traversal():\n\n DECLARATIONS = (\n 'function foo(x){}',\n 'var foo = function foo(x){}',\n 'var foo = (x) => {}',\n 'var foo = (x) => undefined',\n )\n for declaration in DECLARATIONS:\n assert not _do_test_raw(\"\"\"\n %s;\n foo({\"bar\":function(){\n bar();\n }});\n \"\"\" % declaration).failed()\n\n assert _do_test_raw(\"\"\"\n %s;\n foo({\"bar\":function(){\n eval(\"evil\");\n }});\n \"\"\" % declaration).failed()", "def get_additional_arguments(obj):\n args = ''\n\n try:\n # If the object has arguments parse them to a string and replace the values from the lookup table\n if obj.args:\n # Parse the list to a string\n args = ' '.join(obj.args)\n\n # Replace the items in the string with entries from the lookup table\n for k, v in config.lookup_table:\n args = args.replace(k, v)\n\n # Reset the args in the object to reset its state\n obj.args = None\n except AttributeError:\n # No additional args present so we can just skip the parsing\n pass\n\n return args", "def parse_argdict(extras):\n return [(key, value() if callable(value) else value) for key, value in extras.items()]", "def __build_argument_for_jump(arg_array: List[Argument], with_html_tag):\n tag_premise = ('<' + tag_type + ' data-argumentation-type=\"attack\">') if with_html_tag else ''\n tag_conclusion = ('<' + tag_type + ' data-argumentation-type=\"argument\">') if 
with_html_tag else ''\n tag_end = ('</' + tag_type + '>') if with_html_tag else ''\n lang = arg_array[0].lang\n _t = Translator(lang)\n\n if len(arg_array) == 1:\n ret_value = __build_val_for_jump(arg_array[0], tag_premise, tag_conclusion, tag_end, _t)\n\n elif len(arg_array) == 2:\n ret_value = __build_val_for_undercut(arg_array, tag_premise, tag_conclusion, tag_end, _t)\n\n else:\n ret_value = __build_val_for_undercutted_undercut(arg_array, tag_premise, tag_conclusion, tag_end, _t)\n\n return ret_value.replace(' ', ' ')", "def calc(bot, sender, sendmsg, label, args):\n\n expr = \" \".join(args)\n banned = dir() + dir(builtins)\n for word in banned:\n if word in expr:\n sendmsg(\"Illegal word found: \" + word)\n return\n try:\n sendmsg(eval(expr))\n except Exception as e:\n sendmsg(str(e))", "def formatargvalues(args, varargs, varkw, locals,\r\n formatarg=str,\r\n formatvarargs=lambda name: '*' + name,\r\n formatvarkw=lambda name: '**' + name,\r\n formatvalue=lambda value: '=' + repr(value),\r\n join=joinseq):\r\n def convert(name, locals=locals,\r\n formatarg=formatarg, formatvalue=formatvalue):\r\n return formatarg(name) + formatvalue(locals[name])\r\n specs = []\r\n for i in range(len(args)):\r\n specs.append(strseq(args[i], convert, join))\r\n if varargs:\r\n specs.append(formatvarargs(varargs) + formatvalue(locals[varargs]))\r\n if varkw:\r\n specs.append(formatvarkw(varkw) + formatvalue(locals[varkw]))\r\n return '(' + string.join(specs, ', ') + ')'", "def map_parameters(arguments, *args):\n for arg in args:\n if arg in ['clear', 'copy', 'fromkeys', 'get', 'items', 'keys', 'pop',\n 'popitem', 'setdefault', 'update', 'values', 'format',\n 'type']:\n Console.error(f'`{arg}` is predefined method.'\n f' Use `arguments[\"--{arg}\"]` in your code')\n raise ValueError(f\"{arg} already used in arguments\")\n elif arg in arguments:\n Console.error(f'`{arg}` is already used in arguments.'\n f' Use `arguments[\"--{arg}\"]` in your code')\n raise ValueError(f\"{arg} already used in arguments\")\n else:\n flag = \"--\" + arg\n if flag in arguments:\n value = arguments[flag]\n else:\n value = None\n arguments[arg] = value", "def arguments_pattern(arguments):\n pattern = []\n \n # reserved keywords for composite commands\n reserved_keywords = (\"to\", \"with\", \">\", \"<\", \"=\", \"apartment\", \"type\")\n \n # check the type of each argument and create a pattern\n for arg in arguments:\n if arg in reserved_keywords:\n pattern.append(arg)\n continue\n \n arg_type = argument_type(arg)\n \n if arg_type == float:\n pattern.append(\"float\")\n elif arg_type == int:\n pattern.append(\"int\")\n else: \n pattern.append(\"string\")\n \n # remove the keywords from the arguments to be able to handle them\n for reserved in reserved_keywords:\n if reserved in arguments:\n arguments.remove(reserved)\n \n # return the pattern as a string\n return \" \".join(pattern)", "def parse_helper(attrs, attrs_name, alt_value=None):\n tuple_re = re.compile('\\([0-9L|,| ]+\\)')\n if not attrs:\n return alt_value\n attrs_str = None if attrs.get(attrs_name) is None else str(attrs.get(attrs_name))\n if attrs_str is None:\n return alt_value\n attrs_match = tuple_re.search(attrs_str)\n if attrs_match is not None:\n if attrs_match.span() == (0, len(attrs_str)):\n dims = eval(attrs_str)\n return dims\n else:\n raise AttributeError(\"Malformed %s dimensions: %s\" % (attrs_name, str(attrs_str)))\n return alt_value", "def evaluator(*args, clusters: bool=True, configuration: Union[AnyStr, List[AnyStr], bool]=\"\",\n enable: 
bool=True, info: bool=True, name: Union[AnyStr, bool]=\"\", nodeType:\n Union[AnyStr, List[AnyStr], bool]=\"\", nodeTypeChildren: bool=True, priority:\n Union[int, bool]=0, valueName: Union[AnyStr, bool]=\"\", q=True, query=True,\n **kwargs)->Union[List[AnyStr], Any]:\n pass", "def _parse(cls, tokens, *, get_params=False):\n\n\t\tif get_params:\n\t\t\tresult = []\n\t\telse:\n\t\t\tresult = None\n\n\t\tfor t in tokens:\n\t\t\tnew = None\n\t\t\tdone = False\n\n\t\t\tif t.kind == 'OPEN':\n\t\t\t\tnew = cls._parse(tokens)\n\t\t\telif t.kind in {'CLOSE', 'DOT'}:\n\t\t\t\tdone = True\n\t\t\telif t.kind == 'LAMBDA':\n\t\t\t\tparams = cls._parse(tokens, get_params=True)\n\n\t\t\t\tif not params:\n\t\t\t\t\traise LambdaError('No parameters in lambda', t.line, t.pos)\n\n\t\t\t\tbody = cls._parse(tokens)\n\n\t\t\t\tif not body:\n\t\t\t\t\traise LambdaError('No body in lambda', t.line, t.pos)\n\n\t\t\t\tnew = Abs(params[-1], body, line=t.line, pos=t.pos)\n\n\t\t\t\tfor param in params[-2::-1]:\n\t\t\t\t\tnew = Abs(param, new, line=t.line, pos=t.pos)\n\n\t\t\t\tdone = True\n\t\t\telif t.kind == 'EQUAL':\n\t\t\t\tvar = cls._parse(tokens)\n\n\t\t\t\tif not var:\n\t\t\t\t\traise LambdaError('No variable to assign to', t.line, t.pos)\n\n\t\t\t\tvalue = cls._parse(tokens)\n\n\t\t\t\tif not value:\n\t\t\t\t\traise LambdaError('No value to assign: ' + var.name, t.line, t.pos)\n\n\t\t\t\tnew = Ass(var, value, line=t.line, pos=t.pos)\n\n\t\t\t\tdone = True\n\t\t\telif t.kind == 'QUERY':\n\t\t\t\tvalue = cls._parse(tokens)\n\n\t\t\t\tif not value:\n\t\t\t\t\traise LambdaError('No value to query', t.line, t.pos)\n\n\t\t\t\tnew = Que(value, line=t.line, pos=t.pos)\n\n\t\t\t\tdone = True\n\t\t\telif t.kind == 'SYMBOL':\n\t\t\t\tnew = Var(t.value, line=t.line, pos=t.pos)\n\n\t\t\tif new is not None:\n\t\t\t\tif get_params:\n\t\t\t\t\tresult.append(new)\n\t\t\t\telif result is None:\n\t\t\t\t\tresult = new\n\t\t\t\telse:\n\t\t\t\t\t# Ensure that when the function and argument are output,\n\t\t\t\t\t# they are correctly parenthesized.\n\t\t\t\t\tif isinstance(result, (Abs, Ass, Que)):\n\t\t\t\t\t\tresult.surround_on_str = True\n\n\t\t\t\t\tif isinstance(new, App):\n\t\t\t\t\t\tnew.surround_on_str = True\n\n\t\t\t\t\tresult = App(result, new, line=new.line, pos=new.pos)\n\n\t\t\tif done:\n\t\t\t\tbreak\n\n\t\treturn result", "def create_operators(op_param_list, global_config=None):\n assert isinstance(op_param_list, dict), ('operator config should be a dict')\n ops = []\n for operator in op_param_list.items():\n assert isinstance(operator, tuple) and len(operator) == 2, \"yaml format error\"\n op_name = list(operator)[0]\n param = {} if operator[1] is None else operator[1]\n if global_config is not None:\n param.update(global_config)\n op = eval(op_name)(**param)\n ops.append(op)\n return ops", "def create_operators(params):\n assert isinstance(params, list), ('operator config should be a list')\n ops = []\n for operator in params:\n assert isinstance(operator,\n dict) and len(operator) == 1, \"yaml format error\"\n op_name = list(operator)[0]\n param = {} if operator[op_name] is None else operator[op_name]\n op = getattr(imaug, op_name)(**param)\n ops.append(op)\n\n return ops", "def expon(*args, **kws) -> core.Expon:\n X, Y, kws = util.parseargs(*args, **kws)\n assert \"exp\" in kws\n return core.Expon(X, Y, **kws)", "def _parse_parameter_list(\n parameter_list: abc.Iterable[str],\n normalize_parameter_names: bool = False,\n normalize_parameter_values: bool = True,\n strip_interior_whitespace: bool = False) -> 
list[tuple[str, str]]:\n parameters = []\n for param in parameter_list:\n param = param.strip()\n if param:\n name, value = param.split('=')\n if strip_interior_whitespace:\n name, value = name.strip(), value.strip()\n if normalize_parameter_names:\n name = name.lower()\n if normalize_parameter_values:\n value = value.lower()\n parameters.append((name, _dequote(value.strip())))\n return parameters", "def _parseArgs(function_args, client_info):\n\n # Split args using the csv module\n arg_list = []\n try:\n reader = csv.reader([function_args])\n _arg_list = reader.next()\n except:\n _arg_list = [function_args]\n\n # Sanitise all the info first\n sanitised_info = {}\n for info in client_info:\n (name, value) = _sanitise(info, client_info[info])\n if value:\n sanitised_info[name] = value\n \n # Now run the given args through SimpleTemplate to do variable replacements\n for arg in _arg_list:\n foo = Template(arg.strip()).safe_substitute(sanitised_info)\n arg_list.append(foo)\n\n\n return arg_list", "def args(hub, val: List[str] or str) -> Tuple[List[str], Dict[str, str]]:\n args = []\n kwargs = {}\n for v in hub.render.cli.render(val):\n if isinstance(v, dict):\n kwargs.update(v)\n else:\n args.append(v)\n\n return args, kwargs", "def makeloop(keyword, G, *args):\n if not args:\n return []\n Nargs = len(args)\n lis = []\n for arg in args:\n lis.append(makeiter(G(\"%s%s\" % (keyword, arg))))\n try:\n Nlis = lis[0].count()\n except TypeError:\n Nlis = len(lis[0])\n olist = [[] for i in range(Nargs)]\n for i in range(Nlis):\n for k in range(Nargs):\n try:\n olist[k].append(lis[k][i])\n except Exception:\n olist[k].append(\"\")\n return olist", "def _parse_params(members_list):\n return [literal_eval(p.strip()) for p in members_list]", "def _unify_exprs(self, exprs):\n if isinstance(exprs, (str, unicode)):\n # We are only being given a single string expression.\n exprs = self.exprs[exprs]\n elif isinstance(exprs, theano.tensor.basic.TensorVariable):\n # TODO: does this work in case of the GPU?\n exprs = exprs\n else:\n # We have several, either string or variable, thus make it a list\n # and substitute the strings.\n exprs = list(exprs)\n exprs = [self.exprs[i] if isinstance(i, str) else i for i in exprs]\n\n return exprs", "def wrapper(self, arglist):\n args = []\n kwargs = {}\n if arglist:\n for arg in shlex.split(arglist):\n if \"=\" in arg:\n split = arg.split(\"=\", 1)\n kwargs[split[0]] = split[1]\n else:\n args.append(arg)\n return fxn(self, *args, **kwargs)", "def process(list_, dict_, keyword):\n if len(list_) == 4:\n name, val, type_, frac_ = list_[0], list_[1], list_[2], list_[3]\n elif list_[0] == 'direc':\n name, val = list_[0], [list_[i] for i in range(len(list_)) if i > 0]\n else:\n name, val = list_[0], list_[1]\n\n if name not in dict_[keyword].keys() and name in ['coeff']:\n dict_[keyword][name] = []\n if keyword in ['TREATED', 'UNTREATED', 'COST'] and 'types' not in dict_[keyword].keys():\n dict_[keyword]['types'] = []\n if keyword in ['TREATED', 'UNTREATED', 'COST']:\n if len(list_) == 4:\n dict_[keyword]['types'] += [[type_, float(frac_)]]\n else:\n dict_[keyword]['types'] += ['nonbinary']\n\n # Type conversion\n if name in ['agents', 'seed', 'maxiter', 'disp']:\n val = int(val)\n elif name in ['source', 'file', 'optimizer', 'start']:\n val = str(val)\n elif name in ['direc']:\n val = list(val)\n else:\n val = float(val)\n if name in ['coeff']:\n dict_[keyword][name] += [val]\n else:\n dict_[keyword][name] = val\n # Finishing.\n return dict_", "def eval_lisp(exp, a_list, 
d_list):\n if exp.atom():\n if exp.int():\n return exp\n if exp in T:\n return SExp(\"T\")\n if exp.null():\n return SExp(\"NIL\")\n if in_pairlist(exp, a_list):\n return getval(exp, a_list)\n raise error.LispException(\"unbound variable: {0}\".format(exp))\n if exp.car().atom():\n if not exp.car().non_int_atom:\n msg = \"'{0}' is not a valid function name or \" \\\n \"special form\".format(exp.car())\n raise error.LispException(msg)\n\n #cdar because cdr only would give (quote 5) evaluating to (5),\n #not 5. only takes one argument.\n if exp.car() in QUOTE:\n check_args(exp.car(), exp.cdr().length(), 1)\n return exp.cdr().car()\n if exp.car() in COND:\n return evcond(exp.cdr(), a_list, d_list)\n if exp.car() in DEFUN:\n new_func = exp.cdr().car()\n args = exp.cdr().cdr().car()\n body = exp.cdr().cdr().cdr().car()\n check_args(new_func, exp.cdr().length(), 3)\n return defun(new_func, args, body, d_list)\n return apply_lisp(exp.car(),\n evlis(exp.cdr(), a_list, d_list),\n a_list, d_list)\n raise error.LispException(\"eval called with invalid expression\")", "def f(self,x,*args):\n #TODO: switch back to this system if the mapping technique is too confusing\n# for p,a in zip(self.params,args):\n# setattr(self,p,a)\n# mval = [m.f(x,*m.parvals) for m in self._models]\n if len(args)!=len(self._pars):\n raise ValueError('incorrect number of parameters')\n\n parlists = self._parlists\n for a,(i,j) in zip(args,self._parlistmaps):\n parlists[i][j] = a\n\n #mval = [m.f(x,*parlists[i]) for i,m in enumerate(self._models)]\n #TODO:speed-up/cacheing?\n for m,pl in zip(self._models,parlists):\n m.parvals = pl\n mval = [m(x) for m in self._models]\n return eval(self._opstr)", "def multi_mapping(func_name, arg_value_pairs, module_name = \"__main__\"):\n func, arg_names = get_function_args(module_name = module_name, function_name = func_name)\n \n return list(map(lambda arg_value_pair: call_func_dynamically(function_name = func_name, \n argument_names = arg_names, \n arg_value_pair = arg_value_pair) ,\n arg_value_pairs))", "def enterParams(o, params):\n r = {}\n for p in params:\n if isinstance(p, tuple):\n p, f = p\n else:\n f = str\n if hasattr(o, p):\n r[p] = f(getattr(o, p))\n return r", "def import_args_from_dict(value, args, config):\n if isinstance(value, six.string_types):\n for match in TOKEN_REGEX.finditer(str(value)):\n token = match.group(1)\n if token in args:\n actual_param = args[token]\n if isinstance(actual_param, six.string_types):\n value = value.replace(\"@\"+token, args[token])\n else:\n value = actual_param\n elif isinstance(value, list):\n return [import_args_from_dict(item, args, config) for item in value]\n elif isinstance(value, dict):\n return {\n key: import_args_from_dict(val, args, config)\n for key, val in value.items()\n }\n elif isinstance(value, tuple):\n return tuple(import_args_from_dict(val, args, config) for val in value)\n return value", "def parse(operators, *term_strs):\n scope = Scope()\n rvs = []\n for ts in term_strs:\n p = Parser(operators, ts, scope)\n try:\n term = p.parse()\n except SyntaxError:\n print 'While parsing: %s' % ts\n raise\n rvs.append(term)\n rvs.append(scope)\n return tuple(rvs)", "def unquoter(datum):\n if isinstance(datum, Pair):\n return evaluate(snek_to_py(datum), env)\n elif isinstance(datum, str):\n val = evaluate(datum, env)\n return val\n elif isinstance(datum, list):\n val = evaluate(datum, env)\n return unquoter(val)\n else:\n return datum", "def _make_args(self, args, defaults=[], vararg=None, kwonlyargs=[],\n kw_defaults=[], 
kwarg=None):\n # On Python 2 convert vararg and kwarg to raw name, raise error using\n # lineno stored on the node and lexer from self.\n # On Python 3.3 extract name and annotation\n # After should be straight forward\n raise NotImplementedError()" ]
[ "0.5543349", "0.5514955", "0.54775155", "0.54402816", "0.5358683", "0.52867675", "0.5281894", "0.52406925", "0.52398866", "0.51826596", "0.51759326", "0.51671344", "0.51667196", "0.5136077", "0.51311547", "0.5085241", "0.5079983", "0.5065142", "0.49714673", "0.4969139", "0.49396652", "0.4918426", "0.48903853", "0.48642713", "0.4863107", "0.48458695", "0.4835208", "0.4833897", "0.4827445", "0.4826804", "0.4825764", "0.48232713", "0.48220232", "0.48209253", "0.48208836", "0.48093992", "0.4801287", "0.47982293", "0.47980034", "0.47912455", "0.47736856", "0.47690383", "0.47564855", "0.47448325", "0.47387794", "0.47219655", "0.47159305", "0.47148952", "0.4707504", "0.47071525", "0.47028464", "0.46911782", "0.4669481", "0.4667732", "0.4650615", "0.4648412", "0.4646231", "0.4634059", "0.46289667", "0.462655", "0.46256804", "0.46216637", "0.461986", "0.46183494", "0.46149066", "0.4608871", "0.46063077", "0.46017033", "0.45984882", "0.4598142", "0.45965225", "0.45945382", "0.4590149", "0.45882738", "0.4586742", "0.45857418", "0.4565453", "0.45648625", "0.45619935", "0.4561092", "0.45585763", "0.45573598", "0.4552842", "0.4546175", "0.45447832", "0.4544181", "0.4536077", "0.4535679", "0.45325243", "0.45254278", "0.45247757", "0.4521144", "0.45182472", "0.4518094", "0.4509448", "0.45081455", "0.45062855", "0.45060104", "0.4504709", "0.45025566" ]
0.6068337
0
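The highest-scoring negatives in the record that closes above implement shell-style placeholder interpolation: if an entire raw string is a single ${func(args)} or ${var}/$var token, the parser returns the evaluated value with its original type; otherwise it splices the str() rendering of each match into the surrounding text. A minimal, self-contained sketch of that substitution pattern (the regex and the names substitute/PLACEHOLDER below are illustrative assumptions, not taken from any particular library):

import re
from typing import Any, Mapping

PLACEHOLDER = re.compile(r"\$\{(\w+)\}|\$(\w+)")

def substitute(raw: str, variables: Mapping[str, Any]) -> Any:
    # Whole string is one placeholder: return the mapped value as-is,
    # preserving its type (int, list, ...) rather than its str() form.
    whole = PLACEHOLDER.fullmatch(raw)
    if whole:
        return variables[whole.group(1) or whole.group(2)]
    # Otherwise interpolate each placeholder as text.
    return PLACEHOLDER.sub(
        lambda m: str(variables[m.group(1) or m.group(2)]), raw)

assert substitute("${port}", {"port": 8080}) == 8080
assert substitute("host:$port/api", {"port": 8080}) == "host:8080/api"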
Return two title strings and a format string corresponding to
def format_pair(self, k, v): if isinstance(v, int): data_width = len(str(v)) + 1 header_width = len(str(k)) w = max(data_width, header_width) h = ('%% %us'%w)%k return ' '*len(h), h, '%%%ud'%w elif k=='dt': fmt = '%6.3f' return 6*' ', '%6s'%k, fmt elif isinstance(v, float): fmt = '% .3e' data_width = len(fmt%1) header_width = len(str(k)) w = max(data_width, header_width) spaces = ' '*(w-data_width) h = ('%%%us'%w)%k return ' '*len(h), h, spaces+fmt elif isinstance(v, dict): results = [self.format_pair(k_, v_) for k_, v_ in v.items()] keys = ' '.join([str(r[-2]) for r in results]) fmts = ' '.join([str(r[-1]) for r in results]) h1 = ('%%.%us'%(len(keys))) % k pl = (len(keys)-len(h1)) // 2 pr = (len(keys)-len(h1)) - pl h1 = '.' * pl + h1 + '.' * pr return h1, keys, fmts elif isinstance(v, h5py.ExternalLink): data_width = len('hdf5-link') header_width = len(str(k)) w = max(data_width, header_width) h = ('%%%us'%w)%k return ' '*len(h), h, '%%%us'%w elif isinstance(v, h5py.VirtualLayout): data_width = len('hdf5-vds') header_width = len(str(k)) w = max(data_width, header_width) h = ('%%%us'%w)%k return ' '*len(h), h, '%%%us'%w else: fmt = '%%%u.%us' % (self.min_str_len, self.max_str_len) w = len(fmt%v) h = ('%%%us'%w)%k return ' '*len(h), h, fmt
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_title(cfg, alias_1, attrs_1, alias_2=None, attrs_2=None,\n op_type='-'):\n if alias_2 is None:\n title = alias_1\n if cfg['years_in_title']:\n title += f\" ({attrs_1['start_year']}-{attrs_1['end_year']})\"\n return title\n if attrs_2 is None:\n raise ValueError(\n \"'attrs_2' needs to be given when 'alias_2' is not None\")\n if op_type == 'rel_bias':\n if not cfg['years_in_title']:\n title = f\"({alias_1} - {alias_2}) / {alias_2}\"\n return title\n if (attrs_1['start_year'] == attrs_2['start_year']\n and attrs_1['end_year'] == attrs_2['end_year']):\n title = (f\"({alias_1} - {alias_2}) / {alias_2} \"\n f\"({attrs_1['start_year']}-{attrs_1['end_year']})\")\n else:\n title = (f\"({alias_1} ({attrs_1['start_year']}-\"\n f\"{attrs_1['end_year']}) - {alias_2} (\"\n f\"{attrs_2['start_year']}-{attrs_2['end_year']})) / \"\n f\"{alias_2} ({attrs_2['start_year']}-\"\n f\"{attrs_2['end_year']})\")\n return title\n if not cfg['years_in_title']:\n title = f\"{alias_1} {op_type} {alias_2}\"\n return title\n if (attrs_1['start_year'] == attrs_2['start_year']\n and attrs_1['end_year'] == attrs_2['end_year']):\n title = (f\"{alias_1} {op_type} {alias_2} ({attrs_1['start_year']}-\"\n f\"{attrs_1['end_year']})\")\n else:\n title = (f\"{alias_1} ({attrs_1['start_year']}-{attrs_1['end_year']}) \"\n f\"{op_type} {alias_2} ({attrs_2['start_year']}-\"\n f\"{attrs_2['end_year']})\")\n return title", "def _make_title(self):\n ret = self.properties['reason'].capitalize()\n ret += ' has been reported near ' + self.properties['address'].split(',')[0]\n time = datetime.strptime(self.properties['when'], '%Y-%m-%dT%H:%M:%S')\n times = [time.strftime(i).lstrip('0') for i in ('%m', '%d', '%I:%M%p')]\n ret += ' on {}/{} at {}'.format(times[0], times[1], times[2])\n return ret", "def create_title(title, year=None, time_step=None, base=0, interval=None,\n gage=None, m=None, h=None):\n if type(gage) is list or type(gage) is tuple:\n title = title + ' at listed gages'\n elif gage is not None:\n title = title + ' at '+ gage\n \n if m is not None:\n title = title + ' for Month {mo} of'.format(mo=m)\n elif h is not None:\n title = title + ' for Hour {ho} of'.format(ho=h) \n elif interval is 'seasonal':\n title = title + ' for Months of'\n elif interval is 'diurnal':\n title = title + ' for Hours of'\n if time_step is not None:\n ts = time_step.replace('min', ' minute').replace('T', ' minute').replace('H', ' hour').replace('D', ' day')\n title = title.format(ts=ts)\n if year is not None:\n title = title +' '+ year\n return title", "def formatName(self):\r\n return self.title.getVal() + \" \" + self.first.getVal() + \" \" + self.last.getVal()", "def get_title():", "def Title(self, **kwargs):\n full_name = ''\n if self.getFirstname() == '' or self.getLastname() == '':\n if not self.getOrganization():\n return '...'\n else:\n return self.getOrganization()\n format = kwargs.get('format', None)\n if format == 'natural':\n full_name = '%s %s' % (self.getFirstname(), self.getLastname())\n else:\n full_name = '%s %s' % (self.getLastname(), self.getFirstname())\n return '%s' % full_name", "def format_title(self, data):\n return data", "def generate_title(radar, field, sweep, datetime_format=None, use_sweep_time=True):\n if use_sweep_time:\n begin_time = generate_radar_time_sweep(radar, sweep)\n else:\n begin_time = generate_radar_time_begin(radar)\n if datetime_format:\n time_str = begin_time.strftime(datetime_format)\n else:\n time_str = begin_time.isoformat() + \"Z\"\n fixed_angle = radar.fixed_angle[\"data\"][sweep]\n l1 = 
f\"{generate_radar_name(radar)} {fixed_angle:.1f} Deg. {time_str} \"\n field_name = generate_field_name(radar, field)\n return l1 + \"\\n\" + field_name", "def add_time_to_title( self, title ):\n begin = self.begin; end = self.end\n if 'span' in self.metadata:\n interval = self.metadata['span']\n elif 'given_kw' in self.metadata and 'span' in self.metadata['given_kw']:\n interval = self.metadata['given_kw']['span']\n else:\n interval = self.time_interval( )\n formatting_interval = self.time_interval()\n if formatting_interval == 600:\n format_str = '%H:%M:%S'\n elif formatting_interval == 3600:\n format_str = '%Y-%m-%d %H:%M'\n elif formatting_interval == 86400:\n format_str = '%Y-%m-%d'\n elif formatting_interval == 86400*7:\n format_str = 'Week %U of %Y'\n\n if interval < 600:\n format_name = 'Seconds'\n time_slice = 1\n elif interval < 3600 and interval >= 600:\n format_name = 'Minutes'\n time_slice = 60\n elif interval >= 3600 and interval < 86400:\n format_name = 'Hours'\n time_slice = 3600\n elif interval >= 86400 and interval < 86400*7:\n format_name = 'Days'\n time_slice = 86400\n elif interval >= 86400*7:\n format_name = 'Weeks'\n time_slice = 86400*7\n else:\n format_str = '%x %X'\n format_name = 'Seconds'\n time_slice = 1\n\n begin_tuple = time.gmtime(begin); end_tuple = time.gmtime(end)\n added_title = '\\n%i %s from ' % (int((end-begin)/time_slice), format_name)\n added_title += time.strftime('%s to' % format_str, begin_tuple)\n if time_slice < 86400:\n add_utc = ' UTC'\n else:\n add_utc = ''\n added_title += time.strftime(' %s%s' % (format_str, add_utc), end_tuple)\n return title + added_title", "def prep_titles(self, cost_title: str=\"\") -> (str, str):\n img_title = self.function_name + \\\n '_batch' + str(self.batch_size)\n\n if cost_title == \"\":\n img_title = str(self.experiment_count) + '_accuracy_plot_' + img_title\n title = self.title + \\\n '\\n' + self.function_name + \", \" + \\\n 'mini-batch size: ' + str(self.batch_size) + \\\n '\\nAvg Last 10 Epochs: Training ' + self.tr_mean_str + '%, Testing ' + self.test_mean_str + '%'\n else:\n img_title = str(self.experiment_count) + '_cost_plot_' + img_title\n title = cost_title\n\n print(f'\\nexperiment: {img_title}')\n return title, img_title", "def inclusive_title(self):\n return self.title + (\" %s\" % (self.episode_to_string(self.latest_season, self.latest_episode),) if self.is_series() else \"\")", "def _get_full_title(self):\n return \"%s - %s %d\" % (self.title, _('Season'), self.season)", "def pretty_title(title):\n output = '-' * 5 + ' ' + title + ' ' + '-' * 5\n return output", "def title(self):\n strng = \"\"\n if self.type:\n strng = self.type\n if self.server:\n if self.status:\n strng = \"%s\" % (strng)\n if not strng:\n strng = \"Error\"\n strng = \"%s on %s\" % (strng, self.server)\n elif self.status:\n strng = self.status\n if self.server:\n strng = \"%s on server %s\" % (strng, self.server)\n elif self.raw:\n strng = self.raw\n else:\n strng = self.error_timestamp.isoformat()\n if self.uid:\n strng = \"%s\" % (strng)\n return strng", "def format_name(f_name, l_name): #docstring (documentation)\n if f_name == \"\" or l_name == \"\":\n return \"You didn't provide valid inputs.\"\n formated_f_name = f_name.title()\n formated_l_name = l_name.title()\n return f\"Result: {formated_f_name} {formated_l_name}\"", "def format(self) -> str:", "def _get_title_and_explanation(self):\n title = \"\"\n more_lines = []\n if self.__doc__:\n # Find the first non-empty line in the docstring. 
If there is\n for line in self.__doc__.split(\"\\n\")[:-1]: # strip off last line, always blank\n line = line.strip()\n if line:\n if not title:\n # We don't have the title set, yet, so we know this is the first line.\n if line.endswith(\".\"):\n # Don't want a period at the end of a title to make it look\n # better.\n title = line[:-1]\n else:\n title = line\n continue\n if not line and not more_lines:\n # We don't need empty lines at the start of the explanation\n continue\n # Add up the lines of the explanation text\n if line.startswith(\"*\"):\n line = f\"&nbsp; &nbsp; {line}\"\n\n more_lines.append(line or \"<br>&nbsp;<br>\") # Empty lines become line break\n return ((title or \"A resource\"), \" \".join(more_lines))", "def name_with_title(self):\n return \"%s %s\" % (self.title, self.name)", "def printable(title, subtitle=None, resp=None):\n title = getfirst(title)\n subtitle = getfirst(subtitle)\n resp = getfirst(resp)\n if subtitle:\n title += \" : \" + subtitle\n if resp:\n title += \" / \" + resp\n return title", "def _prettyfilename(self):\n return f'{self.title} ({self.year})'", "def get_descriptive_name(self):\n return f\"{self.year} {self.make} {self.model}\".title()", "def getFormat(headings):\n Indent = 4\n DescWid = 20\n ColWid = 12\n\n # figure out how wide our columns have to be\n wid = 0\n for s in headings:\n if len(s) > wid:\n wid = len(s)\n if wid >= ColWid:\n ColWid = wid + 1\n\n # generate the format string\n f = \"\"\n i = 0\n while i < Indent:\n f += ' '\n i += 1\n\n col = 0\n while col < len(headings):\n wid = DescWid if col == 0 else ColWid\n f += '%'\n if col == 0:\n f += \"-%ds\" % wid\n else:\n f += \"%ds\" % wid\n col += 1\n return f", "def get_title(self):\n title = (None, 7)\n for text, level in self._headers:\n if level < title[1]:\n title = (text, level)\n return title[0]", "def title_draw():\n nonlocal width\n widthTitle = len(self.str_title)\n if widthTitle > width:\n self.str_title = self.str_title[0:width-5] + '...'\n widthTitle = len(self.str_title)\n h_len = widthTitle + self.l_padding + self.r_padding\n top = ''.join(['┌'] + ['─' * h_len] + ['┐']) + '\\n'\n result = top + \\\n '│' + \\\n ' ' * self.l_padding + \\\n self.str_title + \\\n ' ' * self.r_padding + \\\n '│' + self.str_shadow + '\\n'\n offset = 2 + self.l_padding + len(self.str_title) + self.r_padding\n return result, offset", "def pretty_title(title):\n output = '-' * 5 + ' ' + title.lower() + ' ' + '-' * 5\n return output", "def html_title(title):\n return '<center><h1>%s</h1></center>' % (title)", "def _title(profile):\n if profile['operation'] == 'differential':\n p1, p2 = profile['profiles']\n return 'differential ({}, {})'.format(_title(p1), _title(p2))\n elif profile['operation'] == 'local feature':\n p = profile['profile']\n return 'local feature {} ({})'.format(profile['function'], _title(p))\n else:\n return ' '.join([str(x) for x in profile.values()])", "def make_title(words):", "def _define_formats(self, workbook):\n self.format_title_main_center = workbook.add_format({\n 'bold': True,\n 'align': 'left',\n 'font_size': 14,\n 'border': True,\n 'font_name':'Arial',\n 'align': 'Center',\n 'bg_color': '#D8D7D7',\n })\n self.format_title = workbook.add_format({\n 'align': 'left',\n 'font_size': 12,\n 'border': True,\n 'font_name':'Arial',\n 'text_wrap': True\n })\n self.format_title_noborder = workbook.add_format({\n 'align': 'left',\n 'font_size': 12,\n 'border': False,\n 'font_name':'Arial'\n })\n self.format_title_noborder_bold = workbook.add_format({\n 'align': 'left',\n 
'font_size': 12,\n 'bold': True,\n 'border': False,\n 'font_name':'Arial'\n })\n self.format_title_center = workbook.add_format({\n 'align': 'left',\n 'font_size': 12,\n 'border': True,\n 'align': 'Center',\n 'font_name':'Arial'\n })\n self.format_title_bold = workbook.add_format({\n 'align': 'left',\n 'font_size': 12,\n 'border': True,\n 'font_name':'Arial',\n 'bold': True,\n })\n self.format_title_center_bold = workbook.add_format({\n 'align': 'left',\n 'font_size': 12,\n 'border': True,\n 'font_name':'Arial',\n 'align': 'Center',\n 'bold': True,\n })\n self.format_title_number = workbook.add_format({\n 'align': 'right',\n 'font_size': 12,\n 'border': True,\n 'font_name':'Arial',\n 'num_format': '#,##0.00',\n })\n self.format_title_number_bold = workbook.add_format({\n 'align': 'right',\n 'font_size': 12,\n 'border': True,\n 'font_name':'Arial',\n 'num_format': '#,##0.00',\n 'bold': True,\n 'bg_color': '#D8D7D7',\n })\n \n self.format_header = workbook.add_format({\n 'bold': True,\n 'border': True,\n 'font_name':'Arial',\n 'font_size': 12,\n 'align': 'Center',\n 'bg_color': '#D8D7D7', \n })\n\n self.merge_format = workbook.add_format({\n 'bold': 1,\n 'border': 1,\n 'align': 'center',\n 'valign': 'vcenter',\n })", "def show_title():\r\n complement = (\r\n '\\n __ ')\r\n title = ('\\n _______ _______________ ____ _______ __ ___ _ _______/ /_ ____ _____ ____ ____ ')\r\n title += ('\\n / ___/ / / / ___/ ___/ _ \\/ __ \\/ ___/ / / / / _ \\| |/_/ ___/ __ \\/ __ `/ __ \\/ __ `/ _ \\ ')\r\n title += ('\\n/ /__/ /_/ / / / / / __/ / / / /__/ /_/ / / __/> </ /__/ / / / /_/ / / / / /_/ / __/ ')\r\n title += ('\\n\\___/\\__,_/_/ /_/ \\___/_/ /_/\\___/\\__, / \\___/_/|_|\\___/_/ /_/\\__,_/_/ /_/\\__, /\\___/ ')\r\n title += ('\\n /____/ /____/ ')\r\n # Add Styles\r\n break_line = ('-' * len(complement) + \"\\n\") * 2\r\n print(\"{}\\n{}\\n{}\\n\".format(break_line, title, break_line))", "def combined_description(desc1, desc2):\n description = desc1\n if desc2:\n description = '{0}_{1}'.format(desc1, desc2)\n\n return description", "def template(title, steps, loops):\n return \"%-8s %-20s %20d\" % (title, format_power(steps), loops)", "def _header_string( self, title='title' ): \n return_str = ''\n return_str += '{}\\n\\n'.format( title )\n return_str += '{} atoms\\n'.format( len(self.atoms) )\n if len(self.bonds) != 0:\n return_str += '{} bonds\\n\\n'.format( len(self.bonds) )\n return_str += '{} atom types\\n'.format( len(self.atom_types ) )\n if len(self.bond_types) != 0:\n return_str += '{} bond types\\n\\n'.format( len(self.bond_types ) )\n return_str += '\\n'\n return return_str", "def print_title(title):\n print \"\\n\"+\"#\"*32+\"\\n# \"+title+\"\\n\"+\"#\"*32+\"\\n\"", "def _make_title(self, ind):\n start = self.df_event_time.loc[ind, 'time']\n date = np.datetime_as_string(start.astype('<M8[ns]'), unit='s')\n start_ns = start - (start // 10**9) * 10**9\n end = self.df_event_time.loc[ind, 'endtime']\n end_ns = end - start + start_ns\n return ''.join((f'##Event {ind} from run {self.run_id}\\n',\n f'##Recorded at ({date[:10]} {date[10:]}) UTC ',\n f'{start_ns} ns - {end_ns} ns'))", "def output_sep_title(title):\n print(f\"{sep_mark}\\t{title}{sep_mark}\")", "def get_short_name(self):\n split = self.name.split(' - ')\n # author, year, and first couple of words of paper title\n return \"{} ({}), {}\".format(split[0], split[1], \" \".join(split[2].split(' ')[:3]))", "def get_full_title(self, separator=u' / ', first_index=0):\n person_name = self.get_person_title()\n title = 
self.Title(separator=separator, first_index=first_index).decode('utf8')\n if title[0:1] == '(':\n return u\"%s %s\" % (person_name, title)\n else:\n return u\"%s, %s\" % (person_name, title)", "def get_title(fn):\n title = fn.name if hasattr(fn, 'name') else fn.__name__\n title = title.replace('_cut_function','')\n suffix = []\n # if 'JetsAK15_subleading_' in title:\n # suffix.append(r'$j^{\\mathrm{AK15}}_{\\mathrm{subl}}$')\n title = title.replace('JetsAK15_subleading_', '').replace('subleading_', '')\n if hasattr(fn, 'left'):\n suffix.append('({:.0f} < {} < {:.0f})'.format(fn.left, svjflatanalysis.utils.get_title('mt'), fn.right))\n # Transform variable name to title stirng\n title = svjflatanalysis.utils.get_title(title)\n if hasattr(fn, 'operator'):\n title += ' ' + fn.operator + ' cut'\n # Add the suffix\n title += ' ' + ' '.join(suffix)\n return title", "def title(string):\n print(\"{}\\n{}\\n\".format(bold(string), underline(string, \"=\")))", "def parse_title(self, pre):\n # Extract datapoints\n title_text = str(pre)\n title = {}\n\n identity_data = self.identity_regex.search(title_text)\n title['linc'] = int(identity_data.group(1).strip().replace(' ', ''))\n title['short_legal'] = identity_data.group(2).strip().replace(';', ' ')\n title['title_number'] = identity_data.group(3).strip()\n\n try:\n title['ats_reference'] = self.ats_regex.search(title_text).group(1).replace(';',' ')\n except AttributeError:\n title['ats_reference'] = ''\n\n title['municipality'] = self.municipality_regex.search(title_text).group(1).replace('\\r','')\n\n try:\n references = self.reference_regex.search(title_text).group(1).split(\"\\n\")\n references = [i.strip() for i in references]\n references = list(filter(None, references))\n title['reference_number'] = references\n except AttributeError:\n title['reference_number'] = ['']\n\n payday_raw = self.payday_regex.search(title_text).group(3).strip('</pre>').strip()\n title['registration'] = payday_raw[:11]\n title['date'] = reversed(payday_raw[15:25].split('/'))\n title['date'] = '-'.join(title['date'])\n title['document_type'] = payday_raw[27:46].strip()\n\n title['value'] = self._try_int(payday_raw[46:62].strip())\n title['consideration'] = self._try_int(payday_raw[62:80].strip())\n\n if \"CONDOMINIUM\" in title_text:\n title['condo'] = True\n else:\n title['condo'] = False\n\n title['title_text'] = title_text.strip('<pre>').strip('</pre>').strip()\n\n return title", "def print_title( title, decorators ):\n decorators = \"*\" * decorators\n print \"\\n%s %s: %s\\n\" % ( decorators, title, decorators )", "def format_name(f_name, l_name):\n #Using an early return if inputs aren't valid\n if f_name == \"\" or l_name == \"\":\n return \"You didn't provide valid inputs.\"\n\n formatted_f_name = f_name.title()\n formatted_l_name = l_name.title()\n\n #Returning a formatted string when inputs are valid\n return f\"{formatted_f_name} {formatted_l_name}\"", "def owner_and_subtitle_helper(self):\n\n subtitle = \"<br><sup>\"\n owner = self.ui.comboBox_coders.currentText()\n if owner == \"\":\n owner = '%'\n else:\n subtitle += _(\"Coder: \") + owner + \" \"\n if self.ui.comboBox_category.currentText() != \"\":\n subtitle += _(\"Category: \") + self.ui.comboBox_category.currentText()\n return owner, subtitle", "def dc_title(self):\n return u\"{0} ({1}): {2} {3}\".format(\n self.label, self.in_assessment[0].timepoint,\n self.subjects[0].code_in_study,\n \"...\" if len(self.subjects) > 1 else \"\")", "def _prettyfilename(self):\n return self.title", "def 
make_title(dawn: str | None, dusk: str | None, /) -> str:\n    logger.debug('Making title')\n    if not dawn or not dusk:\n        logger.error('Cannot find start/end date\\n')\n        sys.exit(1)\n    api_dfm, msg_dfm = '%Y-%m-%dT%H:%M:%SZ', '%d %B %Y'\n    try:\n        start_date = datetime.strptime(dawn, api_dfm).strftime(msg_dfm)\n        end_date = datetime.strptime(dusk, api_dfm).strftime(msg_dfm)\n    except ValueError as err:\n        logger.error(f'{err}\\n')\n        sys.exit(1)\n\n    logger.debug('Title was made\\n')\n    return f'From: {start_date} - To: {end_date}'", "def title(text, level=0):\n    return '\\n' + text + '\\n' + '=-~_#%^' [level] * len(text) + '\\n\\n'", "def __str__(self):\n        # These are required tags so we should have generated an\n        # error beforehand and this shouldn't raise a ``KeyError``\n        s = [(\"Album Title\", self[\"TITLE\"]), (\"Album Artist\", self[\"ARTIST\"]),\n             (\"Year\", self[\"DATE_RECORDED\"]), (\"Genre\", self[\"GENRE\"])]\n        s = OrderedDict(s)\n\n        def add_optional(key):\n            nonlocal s\n            if key in self:\n                text = key.replace('_', ' ').split(' ')\n                text = ' '.join([x.capitalize() for x in text])\n                s[text] = self[key]\n\n        add_optional(\"LABEL\")\n        add_optional(\"ISSUE_DATE\")\n        add_optional(\"ORIGINAL_MEDIUM\")\n        add_optional(\"VERSION\")\n        add_optional(\"HD_FORMAT\")\n        add_optional(\"DISC_NAME\")\n        add_optional(\"PHASE_NAME\")\n        if self.discs > 1:\n            s[\"Disc\"] = self[\"PART_NUMBER\"]\n            s[\"Discs\"] = self.discs\n        if self.channels != \"2.0\":\n            s[\"Channels\"] = self.channels\n        # Now we have to deal with the formatted output. First we need\n        # the maximum length of the keys to properly align the output\n        # Note that the keys used will have a space appended, so we add 1\n        max_len = max(len(x[0]) for x in s)+1\n\n        # Output for an entry in ``s`` of (\"Year\", \"2016\") with a ``max_len`` of 10\n        # would be: '= Year .....: 2016'\n        def line(k, v):\n            return f\"{k.ljust(max_len, '.')}: {v}\"\n\n        s = [line(*x) for x in s.items()]\n        # Now we can reuse ``max_len`` to mean the longest fully formatted line\n        # We want to add '= ' to the left side and ' =' to the right side to\n        # form a border\n        max_len = max(len(x) for x in s)\n        s = [f'= {x:<{max_len}} =' for x in s]\n        max_len += 4\n        s = [\" ALBUM INFORMATION \".center(max_len, \"=\")] + s + [\"=\" * max_len]\n        return \"\\n\".join(s)", "def _generate_title_description(psap_id, title, description):\n    if description is None:\n        description = PersistentFields.get_description(psap_id)\n    else:\n        PersistentFields.set_description(psap_id, description)\n    if title is None:\n        title = PersistentFields.get_title(psap_id)\n    else:\n        PersistentFields.set_title(psap_id, title)\n\n    return title, description", "def get_descriptive_name(self):\r\n        long_name=str(self.year)+' '+self.make+' '+self.model\r\n        return long_name.title()", "def ftitle(self, text):\n        return \"{} - {}\".format(self._app_name, text)", "def get_descriptive_name(self):\r\n        long_name = str(self.year)+' '+self.make + ' '+self.model\r\n        return long_name.title()", "def get_describe_name(self):\n        long_name = str(self.year)+ ' ' + self.make.title()+ ' ' +self.model.title()\n        return long_name", "def TitlePrint(title):\n    titleLength = len(title)\n    barLength = titleLength + 12\n    fmtdTitle = '----- {0} -----'.format(title)\n    bar = '-' * barLength\n    print(bar, fmtdTitle, bar,\n          sep='\\n', end='\\n\\n')", "def individual_info(self, ctx: commands.Context, format: str) -> str:\n\t\tformat = self.__normalize(ctx, format)\n\t\ttip = self.formats[format]\n\t\theader_text = self.__header(format, tip)\n\t\thow_to = 
blockquote(tip.escaped)\n\t\tfooter_text = self.__footer(format)\n\t\treturn f\"{header_text}\\n\\n{how_to}\\n\\n{footer_text}\"", "def get_descriptive_name(self):\n long_name = f\"{self.year} {self.make} {self.model}\"\n return long_name.title()", "def summary_title(tile_summary):\n return f\"Slide tile_summary.slide_name Tile Summary:\"", "def fCommon(self):\n return '%s %s, %4.4d %s:%2.2d %s' % (\n self._fmon, self._day, self._year, self._pmhour,\n self._minute, self._pm)", "def test_get_title(double_title, single_title, empty_title):\n assert get_title(double_title) == \"Parton distributions with LHC data\"\n assert get_title(single_title) == \"The Large Hadron Collider\"\n assert get_title(empty_title) == \"\"\n\n no_title_key = {\n \"not_titles\": []\n }\n assert get_title(no_title_key) == \"\"", "def generate_vpt_title(radar, field):\n time_str = generate_radar_time_begin(radar).isoformat() + \"Z\"\n l1 = f\"{generate_radar_name(radar)} {time_str} \"\n field_name = generate_field_name(radar, field)\n return l1 + \"\\n\" + field_name", "def format(self):\r\n\r\n earth = \"???\" if self.maskearth else self.earth\r\n air = \"???\" if self.maskair else self.air\r\n fire = \"???\" if self.maskfire else self.fire\r\n water = \"???\" if self.maskwater else self.water\r\n\r\n if any((self.earth, self.fire, self.water)):\r\n statsline = f'Stats: {earth}/{air}/{fire}/{water}'\r\n elif self.air:\r\n statsline = f'Air: {air}'\r\n else:\r\n statsline = ''\r\n\r\n return (\r\n f'Character {self.name}, [{self.token}]. '\r\n f'Init: {self.init} {statsline} Owner: {self.user.name}'\r\n )", "def __str__(self):\n return \"{title}\".format(title=self.title)", "def format(self, *args, **kwargs) -> String:\n pass", "def make_main_title(self, end, end_center=False):\n main_title = r\"\\begin{center}\"\n if self.detector is not None:\n main_title += \"%s \"%self.detector\n if self.selection is not None:\n main_title += \"%s Event Selection \"%self.selection\n main_title += end\n if end_center:\n main_title += r\"\\end{center}\"\n return main_title", "def get_descriptive_name(self):\r\n long_name = str(self.year) + ' ' + self.make + ' ' + self.model\r\n return long_name.title()", "def __str__(self):\n date_str = self.date.strftime(self.journal.config['timeformat'])\n title = date_str + \" \" + self.title\n body = self.body.strip()\n\n return \"{title}{sep}{body}\\n\".format(\n title=title,\n sep=\"\\n\" if self.body else \"\",\n body=body\n )", "def __str__(self):\n return '%s, %s: %s' % (self.first_author, self.year, self.title)", "def get_title(self, entry):\n title = _('%(title)s (%(word_count)i words)') % \\\n {'title': entry.title, 'word_count': entry.word_count}\n return title", "def numbered_title(self):\n return f\"{self.title}\"", "def numbered_title(self):\n return f\"{self.title}\"", "def entry_formatter(\n author: str = \"No author\",\n title: str = \"No title\",\n clipping: str = \"No title\",\n timestamp: str = \"No timestamp\",\n id: str = \"No ID\"\n ):\n header = f\"{title}, by {author}.\"\n\n \n return f\"\"\"\n {header}\n ----------------\n \"{clipping}\"\n\n Saved: {timestamp}\n ID: {id}\n \"\"\"", "def formatted(self) -> str:\r\n ...", "def subtitle(string):\n print(\"{}\\n{}\\n\".format(bold(string), underline(string, \"-\")))", "def test_title(self):\n self.assertEquals(\"Title\\n=====\", trans(\"== Title ==\"))\n self.assertEquals(\"Title\\n-----\", trans(\"=== Title ===\"))\n self.assertEquals(\"#### Title\", trans(\"==== Title ====\"))\n self.assertEquals(\"##### Title\", 
trans(\"===== Title =====\"))", "def getTitle(self): #$NON-NLS-1$\r", "def getTitle(self): #$NON-NLS-1$\r", "def generate_title(self, title=None):\n if title is None:\n title = self.header.get('title', self.title)\n\n title = self.generate(title)\n title = title.replace('<p>', '').replace('</p>', '')\n # no trailing newlines\n title = re.sub(r'\\n+', ' ', title).rstrip()\n return title", "def get_descriptive_name(self):\n long_name = f\"{self.make} {self.model} {self.year}\"\n \n return long_name.title()", "def _prettyfilename(self):\n return f'{self.grandparentTitle} - {self.seasonEpisode} - {self.title}'", "def get_title(self) -> str:\n pass", "def get_descriptive_name(self):\n long_name = str(self.year) + ' ' + self.make + ' ' + self.model\n return long_name.title()", "def get_descriptive_name(self):\n long_name = str(self.year) + ' ' + self.make + ' ' + self.model\n return long_name.title()", "def get_descriptive_name(self):\n long_name = str(self.year) + ' ' + self.make + ' ' + self.model\n return long_name.title()", "def get_descriptive_name(self):\n long_name = str(self.year) + ' ' + self.make + ' ' + self.model\n return long_name.title()", "def get_descriptive_name(self):\n long_name = str(self.year) + ' ' + self.make + ' ' + self.model\n return long_name.title()", "def get_descriptive_name(self):\n long_name = str(self.year) + ' ' + self.make + ' ' + self.model\n return long_name.title()", "def get_descriptive_name(self):\n long_name = str(self.year) + ' ' + self.make + ' ' + self.model\n return long_name.title()", "def get_descriptive_name(self):\n long_name = str(self.year) + ' ' + self.make + ' ' + self.model\n return long_name.title()", "def _build_title(db, place):\n descr = place.get_title()\n location = get_main_location(db, place)\n parish = location.get(PlaceType.PARISH)\n city = location.get(PlaceType.CITY)\n state = location.get(PlaceType.STATE)\n title_descr = \"\"\n if descr:\n title_descr += descr.strip()\n if parish:\n title_descr += ', ' + parish.strip() + _(\" parish\")\n if city:\n title_descr += ', ' + city.strip()\n if state:\n title_descr += ', ' + state.strip() + _(\" state\")\n return _strip_leading_comma(title_descr)", "def get_descriptive_name(self):\n description = (f\"{self.year} {self.manufacturer.title()} \"\n f\"{self.model.title()}\")\n\n return description", "def format_title(input_str):\n title_mapping = {'PD_whole_tree': 'Phylogenetic Diversity'}\n\n if input_str in title_mapping:\n return title_mapping[input_str]\n else:\n return ' '.join(map(lambda e: e[0].upper() + e[1:],\n input_str.split('_')))", "def display_strptime_formatters():\n data = [\n [\"%a\", \"Weekday as locale's abbreviated name.\", \"Mon\"],\n [\"%A\", \"Weekday as locale's full name.\", \"Monday\"],\n [\"%w\", \"Weekday as a decimal number, where 0 is Sunday and 6 is Saturday.\", \"1\"],\n [\"%d\", \"Day of the month as a zero-padded decimal number.\", \"30\"],\n [\"%-d\", \"Day of the month as a decimal number. (Platform specific)\", \"30\"],\n [\"%b\", \"Month as locale's abbreviated name.\", \"Sep\"],\n [\"%B\", \"Month as locale's full name.\", \"September\"],\n [\"%m\", \"Month as a zero-padded decimal number.\", \"09\"],\n [\"%-m\", \"Month as a decimal number. (Platform specific)\", \"9\"],\n [\"%y\", \"Year without century as a zero-padded decimal number.\", \"13\"],\n [\"%Y\", \"Year with century as a decimal number.\", \"2013\"],\n [\"%H\", \"Hour (24-hour clock) as a zero-padded decimal number.\", \"07\"],\n [\"%-H\", \"Hour (24-hour clock) as a decimal number. 
(Platform specific)\", \"7\"],\n [\"%I\", \"Hour (12-hour clock) as a zero-padded decimal number.\", \"07\"],\n [\"%-I\", \"Hour (12-hour clock) as a decimal number. (Platform specific)\", \"7\"],\n [\"%p\", \"Locale's equivalent of either AM or PM.\", \"AM\"],\n [\"%M\", \"Minute as a zero-padded decimal number.\", \"06\"],\n [\"%-M\", \"Minute as a decimal number. (Platform specific)\", \"6\"],\n [\"%S\", \"Second as a zero-padded decimal number.\", \"05\"],\n [\"%-S\", \"Second as a decimal number. (Platform specific)\", \"5\"],\n [\"%f\", \"Microsecond as a decimal number, zero-padded on the left.\", \"000000\"],\n [\"%z\", \"UTC offset in the form +HHMM or -HHMM (empty string if the the object is naive).\", \"\"],\n [\"%Z\", \"Time zone name (empty string if the object is naive).\", \"\"],\n [\"%j\", \"Day of the year as a zero-padded decimal number.\", \"273\"],\n [\"%-j\", \"Day of the year as a decimal number. (Platform specific)\", \"273\"],\n [\"%U\", \"Week number of the year (Sunday as the first day of the week) as a zero padded decimal number. All days in a new year preceding the first Sunday are considered to be in week 0.\", \"39\"],\n [\"%W\", \"Week number of the year (Monday as the first day of the week) as a decimal number. All days in a new year preceding the first Monday are considered to be in week 0.\", \"39\"],\n [\"%c\", \"Locale's appropriate date and time representation.\", \"Mon Sep 30 07:06:05 2013\"],\n [\"%x\", \"Locale's appropriate date representation.\", \"09/30/13\"],\n [\"%X\", \"Locale's appropriate time representation.\", \"07:06:05\"],\n [\"%%\", \"A literal '%' character.\", \"%\"]\n ]\n\n display(HTML(\n '<table><tr>{}</tr></table>'.format(\n '</tr><tr>'.join(\n '<td>{}</td>'.format('</td><td>'.join(str(_) for _ in row)) for row in data)\n )\n ))", "def get_kindle_strs(self):\n # Title/author\n title_line = '%s (%s)' % (self.title, self.author)\n\n # Metadata line\n highlight_type = '- Your %s' % (self.clip_type.capitalize())\n if (self.loc_range[0] == self.loc_range[1]):\n location_string = 'Location %d' % (self.loc_range[0])\n else:\n location_string = 'Location %d-%d' % (self.loc_range[0], self.loc_range[1])\n page_string = None\n if (self.page is not None):\n page_string = 'on Page %d' % self.page\n date_string = self.datetime.strftime('%A, %B %d, %Y %I:%M:%S %p')\n\n if (self.page is not None):\n metadata_line = '%s %s | %s | Added on %s' % (highlight_type, page_string, location_string, date_string)\n else:\n metadata_line = '%s %s | Added on %s' % (highlight_type, location_string, date_string)\n\n return (title_line, metadata_line, '\\n', self.clip_text)", "def get_descriptive_name(self):\r\n long_name = str(self.year) + ' ' + self.make + ' ' + self.model\r\n #Mostrar_Grande = long_name.upper()\r\n #return long_name.upper()\r\n #return Mostrar_Grande #Funciona Com Return TAMBÉM, mas olhe na linha 39 como seria necessário usar.\r\n print(long_name.upper())", "def title(string: str, level=H1) -> str:\n appended = []\n for lvl in range(0, level):\n appended.append(\"#\")\n return f\"{''.join(appended)} {string}\"", "def __repr__(self) -> str:\r\n\r\n saida = \"Format: \"\r\n x = self.getformat()\r\n for _ in range(len(x)):\r\n saida = f\"{saida}{x[_]}\"\r\n if _ < len(x)-1:\r\n saida += \", \"\r\n saida += \"\\n\"\r\n return saida", "def format_apa_style(full_name, year, title, city, state, publisher):\n split_name = full_name.split()\n\n if len(split_name) < 2:\n raise ValueError('Too few names')\n if len(split_name) > 2:\n raise ValueError('Too 
many names')\n\n first_initial = split_name[0][0]\n last_name = split_name[1]\n return '{0}, {1}. ({2}). {3}. {4}, {5}: {6}.'.format(last_name, first_initial,\n year, title,city,\n state, publisher)", "def _prettyfilename(self):\n return f'{self.title} ({self.subtype})'", "def output_plain_sep_title(title):\n print(f\"{plain_sep_mark}\\t{title}{plain_sep_mark}\")", "def get_formatted_name(first_name,last_name):\n full_name = f\"{first_name} {last_name}\"\n return full_name.title()" ]
[ "0.7094175", "0.6907892", "0.68420154", "0.6698707", "0.6661369", "0.6655837", "0.6552088", "0.6432714", "0.64024884", "0.6400669", "0.637345", "0.6352613", "0.6352252", "0.6263117", "0.61993533", "0.6197676", "0.6158429", "0.61383647", "0.61294615", "0.61219627", "0.61102825", "0.60996956", "0.6097603", "0.60603315", "0.6059054", "0.60563105", "0.605588", "0.60552156", "0.6023014", "0.60101295", "0.6004986", "0.60011286", "0.59964514", "0.5985295", "0.5984299", "0.59804523", "0.5972589", "0.59699935", "0.5958207", "0.5943801", "0.5937856", "0.59360665", "0.59287316", "0.59279585", "0.5918107", "0.591601", "0.5914867", "0.59101087", "0.5882889", "0.5876244", "0.58703995", "0.58674896", "0.58654124", "0.58603567", "0.58511156", "0.5848604", "0.58409655", "0.58278495", "0.5822181", "0.5802866", "0.57973814", "0.5795507", "0.57900757", "0.5787145", "0.57753456", "0.5761712", "0.57610375", "0.57560354", "0.5750368", "0.5748368", "0.5748368", "0.5745472", "0.57444894", "0.5744413", "0.5741126", "0.57409537", "0.57409537", "0.5739542", "0.5738941", "0.5737804", "0.5720991", "0.57199925", "0.57199925", "0.57199925", "0.57199925", "0.57199925", "0.57199925", "0.57199925", "0.57199925", "0.57108593", "0.5709702", "0.570436", "0.57009214", "0.5699692", "0.5695606", "0.5693794", "0.5685684", "0.5681132", "0.56793696", "0.5672393", "0.5669594" ]
0.0
-1
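The document in the record above returns, for each key/value pair, a triple of (blank header, header, printf-style format) whose widths agree, so a caller can print one aligned header line followed by data lines. A standalone re-derivation of just its int and float branches, plus a short driver showing how the triple is consumed (the driver and the sample row are assumptions; the original caller is not part of this record):

def format_pair(key, value):
    # Re-derivation of the int and float branches only.
    if isinstance(value, int):
        width = max(len(str(value)) + 1, len(key))
        header = ('%%%us' % width) % key          # e.g. '%4s' % 'step'
        return ' ' * len(header), header, '%%%ud' % width
    fmt = '% .3e'                                 # float branch
    width = max(len(fmt % 1), len(key))
    pad = ' ' * (width - len(fmt % 1))
    header = ('%%%us' % width) % key
    return ' ' * len(header), header, pad + fmt

row = {'step': 120, 'loss': 0.01234}
cols = [format_pair(k, v) for k, v in row.items()]
print(' '.join(c[1] for c in cols))                         # step       loss
print(' '.join(c[2] for c in cols) % tuple(row.values()))   #  120  1.234e-02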
Takes a data dict and returns a data line.
def fill_line(self, dct): return self._line_format % self.list_values(dct)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def process_line(self, line, data):\n return data", "def _plot_dict_line(d, label=None):\n xvals, yvals = _dict2lists(d)\n if label:\n pylab.plot(xvals, yvals, label=label)\n else:\n pylab.plot(xvals, yvals)", "def writeLine(self, data):\n raise NotImplementedError()", "def _get_linedict(self, line):\n if (isinstance(line, dict) and\n (line in self.lines)):\n return line\n for linedict in self.lines:\n if line in linedict.itervalues():\n ld = linedict\n try:\n return ld\n except NameError:\n raise HKEPlotterLineDoesNotExistError(line)", "def getLineInformation(line):\n \n pass", "def format_line(line_data):\n output = []\n for column in COLUMNS:\n output.append(str(line_data[column]) if column != 'attributes' else format_attributes(line_data[column]))\n\n return '\\t'.join(output)", "def format_data(self, data):", "def get_data(self, line_id):\n # check\n if line_id not in self._lineDict:\n raise KeyError('Line ID %s does not exist.' % str(line_id))\n\n # get line\n line = self._lineDict[line_id]\n if line is None:\n raise RuntimeError('Line ID %s has been removed.' % line_id)\n\n return line.get_xdata(), line.get_ydata()", "def get_lines(m_data: List[Dict[str, Any]], line_tag: str) -> List[FpLine]:\n lines: List[FpLine] = list()\n for line in get_all_dicts_by_key(m_data, line_tag):\n lines.append(create_line(line, line_tag))\n return lines", "def pkt_line(data):\n if data is None:\n return b\"0000\"\n return (\"%04x\" % (len(data) + 4)).encode(\"ascii\") + data", "def map_data(self, data, properties):\n # look for width\n if 'width' in properties:\n self.width = data[properties.index('width')]\n self.options.append('line width=' + str(self.width))\n # look for tint\n if 'tint' in properties:\n self.tint = data[properties.index('tint')]\n color = self.tint_color + \"!\" + str(self.tint) + \"!\" + self.base_color\n else:\n color = self.base_color\n self.options.append('draw=' + color)\n # look for opacity\n if 'opacity' in properties:\n self.opacity = data[properties.index('opacity')]\n self.options.append('opacity=' + str(self.opacity))", "def formatRecvdData(data_recvd):\n\n\t##############\tADD YOUR CODE HERE\t##############\n\t\n\tx = PrettyTable()\n\t\t\n\tdict2 = eval(data_recvd)\n\t\t\n\tx.field_names=[\"OPTION_NUMBER\",\"OPTIONS\",\"ADDITIONAL INFO\"]\n\tfor key,value in dict2.items():\n\t\tvalue = str(value)\n\t\tif '{' in value:\n\t\t\t\td = eval(value)\n\t\t\t\tfor i,j in d.items():\n\n\t\t\t\t\tx.add_row([key,i,j])\n\t\t\t\t\n\t\telse:\n\t\t\t\t\n\t\t\tx.add_row([key,value,\"-\"])\n\tprint(x)\n\t\n\n\t##################################################", "def linedata():\n get_values = request.args\n pc = get_values.get('pc') is not None # Per Capita\n gr = get_values.get('gr') is not None # Growth Rate\n place_args, _ = get_place_args(get_values)\n plot_data, _ = datachart_handler.get_plot_data(place_args, pc, gr)\n return json.dumps(plot_data)", "def print_data_line(line, max_name, max_roomcode, max_type):\n print(\"|\", end='')\n print(line[0], end='')\n print(\"|\", end='')\n print(line[1], end='')\n print(\" \" * (max_name - len(line[1])), end='')\n print(\"|\", end='')\n print(line[4], end='')\n print(\" \" * (max_type - len(line[4])), end='')\n print(\"|\", end='')\n print(line[2], end='')\n print(\" \" * (max_roomcode - len(line[2])), end='')\n print(\"|\", end='')\n print(line[3], end='')\n print(\"|\")", "def string_dict(ds, headline='DICTIONARY:', offset=25):\n template = '{:%is} {}' % offset\n rows = [template.format('\"{}\":'.format(n), ds[n]) for n in 
sorted(ds)]\n s = headline + '\\n' + '\\n'.join(rows)\n return s", "def fetch_data(self, document, line):\n index = self.resolve_ref(document)\n return line[index]", "def _asline(key, val):\n return key + ': ' + '; '.join(val) + '\\n'", "def render_data(self):\n if self.data:\n #print \"ENTRY DATA: %s\" % type(self.data)\n #make sure that data is buffered with a blank line at the end\n #makes the resulting log easier to read.\n #if there are more than one blanklines, can leave them\n last_line = self.data.splitlines()[-1]\n #not re.match('\\s', last_line) and\n \n #are there characters in the last line? need to adjust if so:\n if re.search('\\S', last_line):\n if re.search('\\n$', last_line):\n self.data += \"\\n\"\n else:\n #self.data += \"\\n\"\n #web entries added will end up with 3 newlines somehow\n #but other entries created with a single string\n #won't have enough new lines...\n #should troubleshoot web entries\n self.data += \"\\n\\n\"\n\n return unicode(self.data)\n else:\n #*2011.11.17 16:44:15\n #if loaded from a file, data almost always has newlines in it\n #shouldn't ever get here in that case\n \n #print \"no data in this entry! : %s\" % self.render_first_line()\n return unicode('')", "def make_line_plot(data, x_label=\"Data\", y_label=\"Data Point\"):\n\n y = data\n x = range(len(y))\n\n plt.xlabel(x_label)\n plt.ylabel(y_label)\n plt.plot(x, y)\n plt.show()", "def print_dict(data):\n print data", "def format_dict(\n d: typing.Mapping[TTextType, TTextType]\n) -> typing.Iterator[TViewLine]:\n\n return format_pairs(d.items())", "def readData(self, dataDict):\n pass", "def line(value):\r\n return '({}, {}), ({}, {})'.format(value.x1(), value.y1(), value.x2(), value.y2())", "def insertLines(data):\n data = pd.DataFrame(data)\n for _,row in data.iterrows():\n insertLine(row)", "def send_lines(data: np.ndarray):\n # NOTE look into this maybe? 
https://stackoverflow.com/questions/49098466/plot-3d-convex-closed-regions-in-matplot-lib/49115448\n send(\"lines\", json.dumps(data.tolist()))", "def format(self, data):\r\n for name, value in sorted(data.items()):\r\n line = '{name} = {value}\\n'.format(\r\n name=name,\r\n value=value,\r\n )\r\n yield line", "async def add_line(self, data: str) -> t.Optional[Result]:\n self.buf += data\n try:\n # remove the last newline\n astob = ast_compile_interactive(self.buf[:-1])\n except Exception:\n self.buf = \"\"\n raise\n else:\n if astob is not None:\n self.buf = \"\"\n return (await self.eval_single(astob))\n return None", "def string_dict(d, headline='DICTIONARY:', offset=25):\n template = '{:%is} {}' % offset\n rows = [template.format('\"{}\":'.format(n), d[n]) for n in sorted(d)]\n s = headline + '\\n' + '\\n'.join(rows)\n return s", "def line_to_data(line):\n elems = line.strip().split(\"\\t\")\n assert len(elems) in [1,2]\n text = None\n label = None\n if len(elems) == 1:\n text = elems[0]\n if len(elems) == 2:\n text = elems[0]\n label = elems[1]\n return (text, label)", "def GetLine(line):\r\n pass", "def make_mwdata_line(c):\n mwdata = c['mwdata']\n r = str(mwdata['rank']) if 'rank' in mwdata else '?'\n rbds = str(mwdata['rank_bounds']).replace(\" \", \"\") if 'rank_bounds' in mwdata else '?'\n ar = str(mwdata['analytic_rank']) if 'analytic_rank' in mwdata else '?'\n ngens = str(len(mwdata['gens']))\n gens = encode_points(mwdata['gens'])\n hts = str(mwdata['heights']).replace(\" \", \"\")\n reg = str(mwdata['reg'])\n ntors = str(mwdata['torsion_order'])\n torstruct = str(mwdata['torsion_structure']).replace(\" \", \"\")\n tgens = encode_points(mwdata['torsion_gens'])\n output_fields = [c['field_label'], c['conductor_label'], c['iso_label'], str(c['number']),\n r, rbds, ar, ngens, gens, hts, reg,\n ntors, torstruct, tgens]\n return \" \".join(output_fields)", "def emit_line(handle, data):\n if len(data) > 254:\n if \" PAGE \" in data:\n data = data.replace(\"Record Group Title\", \"Record Group\")\n data = data.replace(\"Series Title\", \"Series\")\n data = data.replace(\n \"Washington, D.C.; Washington, D.C.;\", \"Washington, D.C.;\"\n )\n if len(data) > 254:\n logging.error(\"Truncating invalid line length: %s\", data)\n data = \"{0}\\n\".format(data[:254])\n handle.write(data)", "def extract_data(line):\n lines = line.split(' - ')\n return json.loads(lines[1])", "def line(\n self, x: Hashable | None = None, y: Hashable | None = None, **kwargs\n ) -> PlotAccessor:\n return self(kind=\"line\", x=x, y=y, **kwargs)", "def isdataline(self, line):\n #\n # Keyworded lines look like:\n #\n # Data line--- make check NONE\n #\n # Set up regular expression for keyword input lines\n data = self.compile(\"isdataline\", r\"^ Data line--- ([^\\n]*)\\n\").search(line)\n result = dict()\n if data:\n result[\"data_line_text\"] = data.group(0)\n result[\"data_line\"] = data.group(1)\n result[\"nlines\"] = data.group(0).count(\"\\n\")\n return result", "def handle_data(self, data):\n if len(data.strip()) > 0:\n self.table += data\n # print(data, end=\" \")", "def getDataBefore(self, data: ghidra.program.model.listing.Data) -> ghidra.program.model.listing.Data:\n ...", "def arduinoLineToDictionary(self, line):\n\n # Temperatures have the following syntax:\n # name=TempX,serial_num=blahblah,value=50,units=F\n\n # Analog pin outputs have the following syntax:\n # analogpin:name=PinX,pin_num=X,value=val\n\n # Digital pins have the following syntax:\n # digitalpin:name=PinX,pin_num=X,value=val\n\n # For the 
temperature sensors, it has to find the serial number, then populate the\n # Temperature value in the ard_dictionary\n\n # For the pressure sensors, it has to find an analog pin_num, then match that to the one in the ard_dictionary\n\n # For the heaters and pumps, it has to find a digital pin_num, then match that to the one in the ard_dictionary\n\n # Split the serial read text by colons to get the type of data and the data itself\n serial_read_input_list = line.split(\":\")\n\n try:\n # First entry is the type, the rest is the data\n type_of_data = serial_read_input_list[0]\n data = serial_read_input_list[1]\n except:\n return\n\n if type_of_data == \"tempsensor\":\n self.process_temp_data(data)\n\n if type_of_data == \"analogpin\":\n self.process_pressure_data(data)\n\n if type_of_data == \"digitalpin\":\n self.process_heater_pump_data(data)", "def add_lineage(self, data: AddLineage) -> Dict[str, Any]:\n try:\n self.client.put(self.get_suffix(AddLineage), data=data.json())\n except APIError as err:\n logger.error(\n f\"Error {err.status_code} trying to PUT lineage for {data.json()}\"\n )\n raise err\n\n from_entity_lineage = self.get_lineage_by_id(\n data.edge.fromEntity.type, str(data.edge.fromEntity.id.__root__)\n )\n\n return from_entity_lineage", "def _format_data(self) -> None:\n for row in self._db_data:\n if row['age_start'] is None:\n continue\n # entry = {'x': 'Celkem', 'y': int(row['count'])}\n elif row['age_start'] == 95:\n entry = {'x': f\"{int(row['age_start'])}+\", 'y': int(row['count'])}\n else:\n entry = {'x': f\"{int(row['age_start'])}-{int(row['age_start'])+4}\", 'y': int(row['count'])}\n self.return_data['data'].append(entry)", "def from_data(data_dict, intrusion=None):\n\n\t\t# Data is verified in the ctor and setters\n\t\treturn LogEntry(vin=data_dict[LogEntry.VIN_FIELD], app_id=data_dict[LogEntry.APP_ID_FIELD],\n\t\t\tlevel=data_dict[LogEntry.LEVEL_FIELD], log_message=data_dict[LogEntry.LOG_MESSAGE_FIELD],\n\t\t\tgps_position=data_dict[LogEntry.GPS_POSITION_FIELD],\n\t\t\ttime_unix=data_dict[LogEntry.TIME_UNIX_FIELD], log_id=data_dict[LogEntry.LOG_ID_FIELD],\n\t\t\tintrusion=intrusion)", "def line(points):\n return LineString(points)", "def parse_string_line(self, data_line):\n if data_line:\n data_line = data_line.rstrip()\n if data_line:\n if data_line[0] == '#':\n extraparams = json.loads(data_line[1:])\n if 'glyph_cap_line' in extraparams:\n self.__capline = extraparams['glyph_cap_line']\n if 'glyph_base_line' in extraparams:\n self.__baseline = extraparams['glyph_base_line']\n if 'glyph_bottom_line' in extraparams:\n self.__bottomline = extraparams['glyph_bottom_line']\n elif len(data_line) > 9:\n strokes = []\n xmin = xmax = ymin = ymax = None\n # individual strokes are stored separated by <space>+R\n # starting at col 11\n for s in split(data_line[10:], ' R'):\n if len(s):\n stroke = list(zip(map(self.__char2val, s[::2]), map(self.__char2val, s[1::2])))\n xmin = min(stroke + ([xmin] if xmin else []), key=lambda t: t[0])\n ymin = min(stroke + ([ymin] if ymin else []), key=lambda t: t[1])\n xmax = max(stroke + ([xmax] if xmax else []), key=lambda t: t[0])\n ymax = max(stroke + ([ymax] if ymax else []), key=lambda t: t[1])\n strokes.append(stroke)\n self.__charcode = int(data_line[0:5])\n self.__left_side = self.__char2val(data_line[8])\n self.__right_side = self.__char2val(data_line[9])\n self.__strokes = strokes\n self.__xmin, self.__ymin, self.__xmax, self.__ymax = (xmin[0], ymin[1], xmax[0], ymax[1]) if strokes else (0, 0, 0, 0)\n return True\n return 
False", "def write_data_line(self, pm_25, pm_10, gps_data):\n self.file.write(str(int(time.time()))) # Unix Time)\n self.file.write(';' + datetime.now().strftime(\"%d.%m.%y %H:%M:%S\")) # Human Readable Time\n self.file.write(';' + str(pm_25)) # pm 2.5 \n self.file.write(';' + str(pm_10)) # pm 10 \n self.file.write(';' + str(gps_data['fix'])) # has fix \n self.file.write(';' + str(gps_data['lon'])) # longitude \n self.file.write(';' + str(gps_data['lat'])) # latitude \n self.file.write(';' + str(gps_data['alt'])) # altitude \n self.file.write(';' + str(gps_data['time'])) # gps unix time \n self.file.write('\\n')\n self.file.flush()", "def print_data(lines):\r\n # looping with indexes as first row needs to be printed differently\r\n for i, j in enumerate(lines):\r\n if i == 0:\r\n first_line(j.split())\r\n else:\r\n other_lines(j.split())", "def addData(self,data):\n\t\tif isinstance(data,list):\n\t\t\tif isinstance(data[0],dict):\n\t\t\t\tself.data.extend(data)\n\t\t\telif isinstance(data[0],list):\t\n\t\t\t\tfor r in data:\n\t\t\t\t\tacc= dict()\n\t\t\t\t\tfor h in self.header:\n\t\t\t\t\t\tacc[h]=r[self.header.index(h)]\t\n\t\t\t\t\tself.data.append(acc) \n\t\t\telse:\n\t\t\t\tself.data.append(dict(zip(self.header,data)))\n\t\telif isinstance(data,dict):\n\t\t\tself.data.append(data)\n\t\telse:\n\t\t\traise datatools.WrongTypeError(data)", "def get_linestring(value):\n \n line = value['line']\n coords = [(x['x'], x['y']) for x in line]\n return geojson.Feature(\n geometry=geojson.LineString(coords),\n properties=value\n )", "def render(data_dict, *args, **kwargs):", "def get(self, line_id: int, **options) -> Dict:\n return self._call(f\"{line_id}\", **options)", "def _find_product_line_data(self, product, supplier, shop, extra):\n for line_data in self._data_lines:\n if self._compare_line_for_addition(line_data, product, supplier, shop, extra):\n return line_data", "def data() -> str:\n return \"1721\\n979\\n366\\n299\\n675\\n1456\"", "def like(obj, **kwargs):\n\n # Create a new line object that is the same as the provided line.\n # Set the initialize leyword to False to skip initializing the data attribute.\n new_line = empty_like(obj, initialize=False, **kwargs)\n\n # Set the new line data attribute to a copy of the provided line's data\n new_line.data = obj.data.copy()\n\n return new_line", "def _prepare_analytic_line(self, cr, uid, obj_line, context=None):\n return {'name': obj_line.name,\n 'date': obj_line.date,\n 'account_id': obj_line.analytic_account_id.id,\n 'unit_amount': obj_line.quantity,\n 'product_id': obj_line.product_id and obj_line.product_id.id or False,\n 'product_uom_id': obj_line.product_uom_id and obj_line.product_uom_id.id or False,\n 'amount': (obj_line.credit or 0.0) - (obj_line.debit or 0.0),\n 'general_account_id': obj_line.account_id.id,\n 'journal_id': obj_line.journal_id.analytic_journal_id.id,\n 'ref': obj_line.ref,\n 'move_id': obj_line.id,\n 'user_id': uid,\n }", "def format(self, data):", "def do_add(self, line):\n # Split the input argument to obtain the data\n raw_data = list (arg.lower () for arg in str (line).split ())\n\n try:\n # Check if input data has 7 data fields\n if not len (raw_data) == len (Data):\n raise AttributeError (\"Please input correct data.\")\n else:\n # Check and wash data by check_all() of DataValidator\n result = self._vld.check_all (raw_data)\n # Check if there is any None which stands for invalid input\n if None in result:\n key = 0\n # build a list of name list\n items = list (map (lambda i: i.name, Data))\n e_str = \"\"\n 
while key < len (result):\n if result[key] is None:\n # Left alignment\n e_str += \"{:<10}\".format (items[key])\n key += 1\n raise ValueError (\"The following field(s) is invalid:\\n%s\" % e_str)\n else:\n self._shw.add_data (result)\n except (AttributeError, ValueError) as e:\n View.error (str (e) + \"\\n\")\n View.help_add ()\n except CSVError as e:\n View.error (e)\n except Exception as e:\n View.error (e)\n else:\n View.success (\"Add data\")", "def build_column(data: List[Dict[str, Any]]) -> str:\n return \"\\n\".join(map(format_item, data[\"items\"][:5]))", "def package_data(data_dict):\r\n return json.dumps(data_dict)", "def show_line(dict, xlabel=\"x\", ylabel=\"y\", title=\"title\"):\n plt.clf()\n plt.cla()\n plt.plot(list(dict.keys()), list(dict.values()), alpha=0.4, color = 'g')\n plt.xlabel(xlabel)\n plt.ylabel(ylabel)\n plt.title(title)\n plt.show()", "def log(data):\n items = []\n for key, value in data.items():\n if value is None:\n items.append('[{}]'.format(key))\n else:\n items.append('[{} {}]'.format(key, value))\n print(' '.join(items))", "def create_data_record(self, data_dict):\n source_dict = deepcopy(data_dict)\n assert not self.is_conflicting_keys(data_dict,\n self.default_values), \"Conflicting keys between default_values and extra_values\"\n source_dict.update(self.default_values)\n return {\n '_index': self.get_full_index(),\n '_type': 'python_log',\n '_source': source_dict\n }", "def get_data(data):\n depart = data[0].lstrip(\" \")\n aiport_time = data[1].lstrip(\" \")\n duration = data[3].rstrip(\"<\").lstrip(\" \")\n return f\"[b][yellow]{depart}[/b]\\n[green]{aiport_time}\\n{duration}\"", "def create_log_entry(data):\n\n if '__iter__' not in data.__dir__():\n return BadArgumentError(type(data))\n\n log_entry = str()\n\n for point in data:\n\n if '__iter__' in point.__dict__():\n joined_point = ','.join(point)\n log_entry += str(joined_point)\n else:\n log_entry += str(point) + ','\n\n return log_entry[:-1]", "def line(self):\n return self[\"line\"]", "def line(self):\n return self[\"line\"]", "def line(self):\n return self[\"line\"]", "def line(self):\n return self[\"line\"]", "def handle_input(data: dict):", "def get_line_item(self, *args):\n return _ida_hexrays.cfunc_t_get_line_item(self, *args)", "def parse_datum( self, data ):\n return data", "def parse_datum( self, data ):\n return data", "def _draw_line(plot, hori, vert, color, text):\n plot.plot(hori, vert, '-o'+color)\n plot.text(hori[-1]-3, vert[-1]+2, text, color=color)", "def append(self, *data):\n super(TextDataWriter, self).append(*data)\n dline = []\n for c, d in zip(self.column_descriptions, data):\n if is_sequence(d):\n for x in d:\n dline.append(c.format(x))\n else:\n dline.append(c.format(d))\n self.fo.write(self.separator.join(dline))\n self.fo.write('\\n')", "def _handle_create_line(self, axes, style_args):\n stream_data = self.server.stream_data\n # sample data for initial create\n x_data = numpy.arange(0, 2, 1)\n y_data = numpy.array([0]*2)\n\n line, = axes.plot(x_data, y_data, '-', **style_args)\n # NOTE: client may set 'label'\n line_name = style_args['label']\n if line_name in stream_data:\n # preserve old line data with a new name\n stream_data[line_name+\"_old_\"+timestamp()] = stream_data[line_name]\n # always start with no data for the new line\n stream_data[line_name] = {'y': [], 'line': line, 'last_len': 0}\n if FLAGS.timestamp:\n stream_data[line_name]['x'] = []\n return line_name", "def _getdata(self, data):\n lines = []\n start_date = str(data['form']['start_date'])\n 
end_date = str(data['form']['end_date'])\n department_ids = data['form']['department_ids']\n\n vehicles_ids = self.pool.get('fleet.vehicle').search(self.cr, self.uid,\\\n [('department_id', 'in', department_ids)], context=self.context)\n\n fuel_qty_line_obj = self.pool.get('fuel.qty.line')\n\n sdate = datetime.strptime(start_date, \"%Y-%m-%d\")\n syear = sdate.year\n smonth = sdate.month\n edate = datetime.strptime(end_date, \"%Y-%m-%d\")\n eyear = edate.year\n emonth = edate.month\n\n fuel_qty_line_ids = fuel_qty_line_obj.search(self.cr, self.uid,\\\n [('vehicles_id', 'in', vehicles_ids)], context=self.context)\n\n\n\n counter = 1\n for qty_line in fuel_qty_line_obj.browse(self.cr, self.uid, \\\n fuel_qty_line_ids, context=self.context):\n current_m = int(qty_line.month)\n current_y = int(qty_line.year)\n start = current_m >= smonth and current_y >= syear\n end = current_m <= emonth and current_y <= eyear\n if start and end:\n line = {'type':str(counter)+\" : \"+\\\n qty_line.vehicles_id.type.name}\n line['vehicle_no'] = qty_line.vehicles_id.vin_sn\n line['spent'] = qty_line.spent_qty\n line['counter_no'] = str(qty_line.vehicles_id.odometer)+\" \"+\\\n qty_line.vehicles_id.odometer_unit\n line['date'] = qty_line.month+\"/\"+qty_line.year\n lines.append(line)\n counter += 1\n return lines", "def build_export_row(self, data:dict) -> dict:\n data = {k : v for k, v in data.items() if v is not np.nan}\n for key, value in data.items():\n if key in _column_types:\n if _column_types[key] == list:\n if isinstance(value, (list, set, tuple)):\n data[key] = LIST_DELIMITER.join(value)\n else:\n data[key] = str(value)\n elif _column_types[key] == bool:\n try:\n data[key] = bool(value)\n except:\n data[key] = False\n else:\n data[key] = str(value)\n return data", "def make_mwdata_lines(cl):\n return \"\\n\".join([make_mwdata_line(\n {\n 'field_label': cl['field_label'],\n 'conductor_label': cl['conductor_label'],\n 'iso_label': cl['iso_label'],\n 'number': i+1,\n 'mwdata': mw,\n }\n ) for i, mw in enumerate(cl['mwdata'])])", "def __call__( self, line ):\n return self.__getitem__( line )", "def __call__( self, line ):\n return self.__getitem__( line )", "def print_data(data):\n print(str(data))\n return data", "def isDataLine(line):\n if len(line) > 1:\n return line[0] != \"#\"\n return False", "def isDataLine(line):\n if len(line) > 1:\n return line[0] != \"#\"\n return False", "def get_line_data(line):\n line_parts = line.split(\",\")\n\n cast_player = line_parts[2].strip('\"').split(\"-\")[0]\n target = line_parts[6].strip('\"')\n if \"-\" in target:\n target = target.split(\"-\")[0]\n\n spell_id = line_parts[9]\n spell_name = line_parts[10]\n\n return (spell_id, spell_name, cast_player, target)", "def _hLine(self, y):\n left, _top, width, _height = self.plot.getPlotBoundsInPixels()\n\n dataPos1 = self.plot.pixelToData(left, y, check=False)\n dataPos2 = self.plot.pixelToData(left + width, y, check=False)\n return dataPos1, dataPos2", "def csv_dict_reader(file_obj, data = [], cost = []):\n reader = csv.DictReader(file_obj, delimiter=',')\n for line in reader:\n data.append(line[\"Дата\"]),\n cost.append(line[\"Расход\"])", "def _joiner(self, data):\n\n # presimplify linestrings if required\n if self.options.presimplify > 0:\n # set default if not specifically given in the options\n if type(self.options.presimplify) == bool:\n simplify_factor = 2\n else:\n simplify_factor = self.options.presimplify\n\n data[\"linestrings\"] = simplify(\n data[\"linestrings\"],\n simplify_factor,\n 
algorithm=self.options.simplify_algorithm,\n package=self.options.simplify_with,\n input_as=\"linestring\",\n prevent_oversimplify=self.options.prevent_oversimplify,\n )\n\n # compute the bounding box of input geometry\n lsbs = bounds(data[\"linestrings\"])\n ptbs = bounds(data[\"coordinates\"])\n data[\"bbox\"] = compare_bounds(lsbs, ptbs)\n\n if not data[\"linestrings\"] and not data[\"coordinates\"]:\n data[\"junctions\"] = self._junctions\n return data\n\n # prequantize linestrings if required\n if self.options.prequantize > 0:\n # set default if not specifically given in the options\n if type(self.options.prequantize) == bool:\n quant_factor = 1e6\n else:\n quant_factor = self.options.prequantize\n\n data[\"linestrings\"], data[\"transform\"] = quantize(\n data[\"linestrings\"], data[\"bbox\"], quant_factor\n )\n\n data[\"coordinates\"], data[\"transform\"] = quantize(\n data[\"coordinates\"], data[\"bbox\"], quant_factor\n )\n\n if not self.options.topology or not data[\"linestrings\"]:\n data[\"junctions\"] = self._junctions\n return data\n\n if self.options.shared_coords:\n\n def _get_verts(geom):\n # get coords of each LineString\n return [x for x in geom.coords]\n\n geoms = {}\n junctions = []\n\n for ls in data[\"linestrings\"]:\n verts = _get_verts(ls)\n for i, vert in enumerate(verts):\n ran = geoms.pop(vert, None)\n neighs = sorted(\n [verts[i - 1], verts[i + 1 if i < len(verts) - 1 else 0]]\n )\n if ran and ran != neighs:\n junctions.append(vert)\n geoms[vert] = neighs\n\n self._junctions = [geometry.Point(xy) for xy in set(junctions)]\n else:\n\n # create list with unique combinations of lines using a rdtree\n line_combs = select_unique_combs(data[\"linestrings\"])\n\n # iterate over index combinations\n for i1, i2 in line_combs:\n g1 = data[\"linestrings\"][i1]\n g2 = data[\"linestrings\"][i2]\n\n # check if geometry are equal\n # being equal meaning the geometry object coincide with each other.\n # a rotated polygon or reversed linestring are both considered equal.\n if not g1.equals(g2):\n # geoms are unique, let's find junctions\n self._shared_segs(g1, g2)\n\n # self._segments are nested lists of LineStrings, get coordinates of each nest\n s_coords = []\n for segment in self._segments:\n s_coords.extend(\n [\n [\n (x.xy[0][y], x.xy[1][y])\n for x in segment\n for y in range(len(x.xy[0]))\n ]\n ]\n )\n\n # only keep junctions that appear only once in each segment (nested list)\n # coordinates that appear multiple times are not junctions\n for coords in s_coords:\n self._junctions.extend(\n [geometry.Point(i) for i in coords if coords.count(i) == 1]\n )\n\n # junctions can appear multiple times in multiple segments, remove duplicates\n self._junctions = [\n loads(xy) for xy in list(set([x.wkb for x in self._junctions]))\n ]\n\n # prepare to return object\n data[\"junctions\"] = self._junctions\n\n return data", "def map_line(**kwargs):\n\n p = copy.copy(params)\n for key,val in kwargs.items():\n setattr(p,key,val)\n\n GR = glo.global_results()", "def _(event):\n line.insert_text(event.data * event.arg)", "def find_line_by_line_id(self, line_id):\n for line in self._data_lines:\n if six.text_type(line.get(\"line_id\")) == six.text_type(line_id):\n return line\n return None", "def _separate_raw_data(self, raw_data):\n for key, value in raw_data.items():\n if type(value) == dict:\n self.data_dict[key] = value\n elif type(value) == list:\n self.data_list[key] = value", "def add_data(self, data: List[dict]):\n raise NotImplementedError()", "def _data_lines(self):\n 
self._load()\n return self._data.setdefault(\"lines\", [])", "def _build_data_from_text(self, text):\n try:\n record = json.loads(text)\n except Exception as e:\n logging.error(f\"Exception: {e}\")\n logging.error(f\"datapoint: {text}\")\n raise e\n return record", "def dataFromLine(sLine):\r\n\tiStart = sLine.find(\"* \")\r\n\tif iStart < 0:\r\n\t\treturn \"\"\r\n\tiStart += 2\r\n\treturn dataFromString(sLine, iStart)", "def agline(line):\n\n vals = {}\n x = ['date', 'movie', 'offset', 'well', 'gen', 'flarem', 'flares', \n 'chargem', 'charges', 'charget', 'escdm', 'escds', 'escddur', 'escmm', 'escms', \n 'escmdur']\n y = line.strip('\\n').split(',')[0:16]\n z = zip(x, y)\n\n for item in z:\n vals[item[0]] = item[1]\n\n return(vals)", "def print_data(**kwargs):\n return f'Имя: {kwargs.get(\"name\")}, Фамилия: {kwargs.get(\"surname\")}, Год рождения: {kwargs.get(\"birth_year\")},' \\\n f' Город проживания: {kwargs.get(\"city\")}, Адрес эл. почты: {kwargs.get(\"email\")},' \\\n f' Номер телефона: {kwargs.get(\"phone\")}.'", "def getDataAfter(self, data: ghidra.program.model.listing.Data) -> ghidra.program.model.listing.Data:\n ...", "def addData(self, d):\n self.__populateDict(self._data, d)", "def to_line(self) -> str:\n bytecount = len(self.data)\n nums = bytearray()\n nums.append(bytecount)\n nums.extend(struct.pack(\">H\", self.address))\n nums.append(self.typ)\n nums.extend(self.data)\n crc = sum(nums)\n crc = ((~crc) + 1) & 0xFF\n nums.append(crc)\n line = \":\" + binascii.hexlify(nums).decode(\"ascii\")\n return line", "def get_line():\n random.seed(datetime.utcnow().microsecond)\n dt = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f')\n event = random.choice(['event1', 'event2', 'event3'])\n return '{};{}\\n'.format(dt, event).encode('utf-8')", "def print_data(data, logger=None, **kwargs):\n print data\n\n return data" ]
[ "0.6137988", "0.57134515", "0.5674397", "0.5633335", "0.55844647", "0.55069125", "0.54839", "0.5479354", "0.54715055", "0.537579", "0.53699166", "0.5352207", "0.5318781", "0.5310787", "0.52863955", "0.5243444", "0.5207458", "0.5203357", "0.5161909", "0.515372", "0.51435477", "0.51400757", "0.5134475", "0.5127887", "0.512383", "0.51109505", "0.5102888", "0.5080274", "0.5079057", "0.50769037", "0.5065883", "0.5055024", "0.5052038", "0.505043", "0.5044154", "0.5037069", "0.5032951", "0.5023096", "0.50209755", "0.5020865", "0.50168353", "0.5001602", "0.49943736", "0.4993414", "0.49835294", "0.49763218", "0.49751773", "0.4963171", "0.4946405", "0.49397334", "0.4938872", "0.49376246", "0.49354973", "0.49339733", "0.4922325", "0.4920499", "0.4915645", "0.49137613", "0.48932767", "0.48925188", "0.48862967", "0.48827893", "0.488135", "0.488135", "0.488135", "0.488135", "0.4871798", "0.4871635", "0.48681572", "0.48681572", "0.48666036", "0.48650807", "0.4860325", "0.48477975", "0.48433492", "0.48404858", "0.48372474", "0.48372474", "0.4831691", "0.48280415", "0.48280415", "0.482656", "0.48204926", "0.4813462", "0.48103526", "0.47992793", "0.47972697", "0.47961906", "0.47845966", "0.4779291", "0.4776206", "0.47743177", "0.477265", "0.47652823", "0.4755665", "0.47433946", "0.47391447", "0.47389144", "0.47348246", "0.47311312" ]
0.56865346
2
If the mouse is outside the window, the paddle will be at the edge.
def paddle_reset_position(self, mouse):
    if (0 + self.paddle.width / 2) <= mouse.x <= (self.window.width - self.paddle.width / 2):
        self.paddle_x = mouse.x - self.paddle.width / 2
    self.window.add(self.paddle, self.paddle_x, self.paddle_y)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def paddle_moving(self, mouse):\n # when the paddle is in the window\n if 0 + self.paddle.width/2 <= mouse.x <= self.window.width - self.paddle.width/2:\n self.paddle.x = mouse.x - self.paddle.width / 2\n\n # when the paddle is about to leave the left side of the window\n elif mouse.x < 0 + self.paddle.width/2:\n self.paddle.x = 0\n\n # when the paddle is about to leave the right side of the window\n elif mouse.x > self.window.width - self.paddle.width/2:\n self.paddle.x = self.window.width - self.paddle.width\n\n # the paddle's y coordinate will always be at the same as below\n self.paddle.y = self.window.height - self.paddle_offset", "def update(self):\n # Get where the mouse is\n pos = pygame.mouse.get_pos()\n # Set the left side of the player bar to the mouse position\n self.rect.x = pos[0]\n # Make sure we don't push the player paddle \n # off the right side of the screen\n if self.rect.x > self.screenwidth - self.width:\n self.rect.x = self.screenwidth - self.width", "def handle_pygame_event(self, event):\n if event.type != MOUSEMOTION:\n # nothing to do\n return\n self.model.paddle.x = event.pos[0]-self.model.paddle.width/2.0", "def update(self):\r\n # Get where the mouse is\r\n pos = pygame.mouse.get_pos()\r\n # Set the left side of the player bar to the mouse position\r\n self.rect.x = pos[0]\r\n # Make sure we don't push the player paddle\r\n # off the right side of the screen\r\n if self.rect.x > self.screenwidth - self.width:\r\n self.rect.x = self.screenwidth - self.width", "def __set_paddle_position(self):\n self.__window.remove(self.__paddle)\n self.__window.add(self.__paddle, (self.__window.width - self.__paddle.width) / 2,\n self.__window.height - self.__paddle_offset)", "def reset_paddle(self):\r\n self.y = self.screen_Height // 2\r\n self.vy = 0", "def __init__(self):\n self.center = Point()\n #x coordinate is set in these amount of pixels to leave a slight gap between the screen and paddle just like in real pong video games\n self.center.x = SCREEN_WIDTH - 10\n #when game starts, paddle is placed on the middle of screen's right edge\n self.center.y = SCREEN_HEIGHT / 2", "def move_down(self):\n #if user moves paddle right below on the screen, they won't be able to move it more downwards by using this if statement\n #SCREEN_HEIGHT - 280 = Exact number of pixels where paddle can stop exactly on bottom edge but still has its body fully shown\n if self.center.y > SCREEN_HEIGHT - 280:\n self.center.y -= MOVE_AMOUNT", "def edges(self):\r\n # appearing on other side of the window #\r\n \r\n # if boid gets out of the margins, it shows on the other side of the window\r\n \r\n if self.position.x > self.width:\r\n self.position.x = 0\r\n elif self.position.x < 0:\r\n self.position.x = self.width\r\n\r\n if self.position.y > self.height:\r\n self.position.y = 0\r\n elif self.position.y < 0:\r\n self.position.y = self.height\r\n \r\n # bouncing from the window #\r\n \r\n # if self.position.x > self.width or self.position.x < 0:\r\n # self.velocity.x = -self.velocity.x\r\n # if self.position.y > self.height or self.position.y < 0: \r\n # self.velocity.y = -self.velocity.y\r", "def hit_wall(self):\n if self.ball.x <= 0 or self.ball.x + self.ball.width > self.window.width:\n self.__dx = -self.__dx\n if self.ball.y <= 0:\n self.__dy = -self.__dy", "def isOutsideBorder(self):\n if (self.posX < -self.myGalaxy.worldWidth or self.posX > self.myGalaxy.worldWidth or\n self.posY < -self.myGalaxy.worldHeight or self.posY > self.myGalaxy.worldHeight):\n return 1\n return 0", "def 
updatePaddle(self, touch):\n #first click\n if (touch != None and self._last == None):\n self._clickdist = touch.x - self._paddle.x\n \n #click hold - paddle movement\n if (self._last != None and touch != None):\n self._paddle.x = touch.x - self._clickdist\n \n #prevent paddle from extending past right edge\n if self._paddle.x > (GAME_WIDTH - PADDLE_WIDTH):\n self._paddle.x = GAME_WIDTH - PADDLE_WIDTH\n \n #prevent paddle from extending past left edge\n if self._paddle.x < 0:\n self._paddle.x = 0\n self._last = touch", "def update(self):\r\n self.x = 60\r\n self.y = games.mouse.y\r\n self.check_collide()", "def update(self):\n pos = pygame.mouse.get_pos()\n self.rect.midtop = pos\n if self.punching:\n self.rect.move_ip(5, 10)", "def update(self):\n self.x = games.mouse.x\n #self.y = games.mouse.y\n self.check_collide()", "def move_up(self):\n #if user moves paddle right on top of screen, they won't be able to move it more upwards by using this if statement\n #SCREEN_HEIGHT - 20 = Exact number of pixels where paddle can stop exactly on top edge but still has its body fully shown\n if self.center.y < SCREEN_HEIGHT - 20:\n self.center.y += MOVE_AMOUNT", "def IsMouseWellOutsideWindow(self):\r\n \r\n screen_rect = self.GetScreenRect() \r\n screen_rect.Inflate(50, 50)\r\n \r\n return not screen_rect.Contains(wx.GetMousePosition())", "def update(self):\n pos = pygame.mouse.get_pos()\n self.rect.midtop = pos\n if self.punching:\n self.rect.move_ip(5, 10) # move fist position in place", "def draw_horizontal_paddle(self):\n pygame.draw.rect(self.screen, self.color, self.top_rect)\n pygame.draw.rect(self.screen, self.color, self.bot_rect)", "def bottom_left_option():\n active = get_active_window()\n Width=get_corner_Width(active)\n Height=get_bottom_Height()\n PosX = get_left_PosX(active,Width)\n PosY=get_bottom_PosY()\n move_window(active,PosX,PosY,Width,Height)\n raise_window(active)", "def update(self):\r\n self.x = games.mouse.x\r\n self.y = games.mouse.y\r\n self.check_collide() # New for Rev2.0\r\n \r\n if self.left <0:\r\n self.left = 0\r\n \r\n if self.right > games.screen.width:\r\n self.right = games.screen.width\r\n \r\n self.check_collide()", "def update(self):\r\n self.x = games.mouse.x\r\n self.y = games.mouse.y\r\n self.check_collide()", "def show_paddle(self, screen, fgColor):\r\n if self.player_Num == 1:\r\n pygame.draw.rect(screen, fgColor, pygame.Rect((0, self.y, self.Width, self.Height)))\r\n elif self.player_Num == 2:\r\n pygame.draw.rect(screen, fgColor, pygame.Rect((self.screen_Width-self.Width, self.y, self.Width, self.Height)))", "def detectPaddleCollision(self, paddle):\n if paddle.contains(self.left,self.top) and self._vy < 0:\n self.bouncesound.play()\n self.verticalBounce()\n self._vx = random.uniform(5.0, 15.0)\n #print 'topright paddle collision'\n if paddle.contains(self.left,self.bottom) and self._vy < 0:\n self.bouncesound.play()\n self.verticalBounce()\n self._vx = random.uniform(5.0, 13.0)\n #print 'bottomright paddle collision'\n if paddle.contains(self.right,self.top) and self._vy < 0:\n self.bouncesound.play()\n self.verticalBounce()\n self._vx = random.uniform(5.0, 13.0)\n #print 'topleft paddle collision'\n if paddle.contains(self.right,self.bottom) and self._vy < 0:\n self.bouncesound.play()\n self.verticalBounce()\n self._vx = random.uniform(-15.0,-5.0)\n #print 'bottomleft paddle collision'", "def _movePaddle(self):\n self._click()\n self._game.updatePaddle(self._touch)\n self._last = self._touch", "def update(self):\n self.x = games.mouse.x\n self.y = 
games.mouse.y\n self.check_collide()", "def check_boundary(self):\n\n\t\tif self.Bubble_initial_pos[0] <= self.Bubble_radius or self.Bubble_initial_pos[0] >= self.tk_pic.width - self.Bubble_radius:\n\t\t\tself.Bubble_vel[0] = -self.Bubble_vel[0]", "def mousePosition(self):", "def hit_paddle(self):\n pass\n\n #Implement if collision with paddle is detected\n\n #Add randomness to how ball direction will change and return value", "def boundary(self):\n if self.pos.x < 0:\n self.pos.x = 0\n if self.pos.x > WIDTH - 48:\n self.pos.x = WIDTH - 48\n if self.pos.y < 0:\n self.pos.y = 0\n if self.pos.y > HEIGHT - 48:\n self.pos.y = HEIGHT - 48\n\n self.rect.topleft = self.pos", "def check_collision(self):\n if self.window.get_object_at(self.ball.x,self.ball.y+self.radius*2) is self.paddle:\n self.bounce()\n if self.window.get_object_at(self.ball.x+self.radius*2,self.ball.y+self.radius*2) is self.paddle:\n self.bounce()", "def checkEdges( self ):\n\t\tx, y = self.position.xy\n\t\tvx, vy = self.velocity.xy\n\t\t\n\t\t#if particle hit left or right wall\n\t\tif abs( x ) > WINDOW_X - self.r:\n\t\t\t#change vertical speed\n\t\t\tvx *= -1\n\t\t\t\n\t\t#if particle hit top or bottom wall\n\t\tif abs( y ) > WINDOW_Y - self.r:\n\t\t\t#change horizontal speed\n\t\t\tvy *= -1\n\t\t\n\t\t#enter new velocity\n\t\tself.velocity.xy = (vx, vy)", "def __init__(self, myCanvas, color, paddleW, paddleH, yAxisPos):\n self.canvas = myCanvas\n self.id = myCanvas.create_rectangle(0, 0, paddleW, paddleH, fill=color)\n\n # Getting height and width of current window\n self.canvas_width = self.canvas.winfo_width()\n self.canvas_height = self.canvas.winfo_height()\n\n # Horizontal Scroll\n self.x = 0\n\n # Centering from width and setting height as per yAxisPos\n self.canvas.move(self.id,\n (self.canvas_width//2) - paddleW // 2,\n ((int(self.canvas_height * yAxisPos)) - (paddleH//2)))\n\n # Binding Arrow Keys\n self.canvas.bind_all('<KeyPress-Left>', self.turn_left)\n self.canvas.bind_all('<KeyPress-Right>', self.turn_right)", "def mouseReleaseEvent(self, event):\n self.box_begin = self.begin\n self.box_end = event.pos()\n self.begin = event.pos()\n self.end = event.pos()\n if not self.permanent_show:\n self.update()", "def mouseReleased():\n if not game_controller.game_over:\n if game_controller.falling_disk and \\\n game_controller.falling_disk.y_vel == 0:\n game_controller.handle_mouseReleased()", "def _step_their_paddle(self):\n if random.random() < self.their_update_probability:\n if self.paddle_l.y < self.ball.y:\n if self.paddle_l.top_bound < self.top_bound:\n self.paddle_l.up()\n else:\n if self.paddle_l.bottom_bound > self.bottom_bound:\n self.paddle_l.down()", "def center_horizontal_paddle(self):\n self.top_center = self.screen_rect.centerx - (self.screen_rect.centerx/2)\n self.bot_center = self.screen_rect.centerx - (self.screen_rect.centerx/2)", "def update(self):\r\n if self.left<0:\r\n self.left=0\r\n if self.right>games.screen.width:\r\n self.right=games.screen.width\r\n if games.keyboard.is_pressed(games.K_LEFT):\r\n self.x-=5\r\n if games.keyboard.is_pressed(games.K_RIGHT):\r\n self.x+=5\r\n self.checkball()\r\n self.points.right=games.screen.width-5", "def __handle_wall_collision(self):\n if self.__ball.x <= 0 or self.__ball.x + self.__ball.width >= self.__window.width:\n self.__dx = - self.__dx\n\n next_target_top = self.__window.get_object_at(self.__ball.x + self.__dx*1.5, self.__ball.y + self.__dy*1.5)\n next_target_bot = self.__window.get_object_at(self.__ball.x + self.__ball.width + self.__dx*1.5,\n 
self.__ball.y + self.__ball.height + self.__dy*1.5)\n\n if self.__hit_paddle(next_target_top) or self.__hit_paddle(next_target_bot):\n self.__dy = - abs(self.__dy)\n if self.__ball.x <= self.__paddle.x + 20:\n # The ball will fly left if hit the left of the paddle\n self.__dx = - abs(self.__dx)\n elif self.__ball.x > self.__paddle.x + self.__paddle.width - 20:\n # The ball will fly right if hit the right of the paddle\n self.__dx = abs(self.__dx)\n elif self.__hit_bricks(next_target_top) or self.__hit_bricks(next_target_bot):\n target_brick = next_target_top if next_target_top else next_target_bot\n self.__remove_brick(target_brick)\n self.__dy = - self.__dy\n elif self.__ball.y <= 0:\n self.__dy = - self.__dy\n elif self.__ball.y + self.__ball.height >= self.__window.height:\n self.__num_lives -= 1\n self.__playing = False\n self.__set_ball_position()\n self.__set_paddle_position()\n self.__set_ball_velocity()\n self.__set_record_board()", "def _catch(self, ball):\n # Work out the position of the ball relative to the paddle.\n pos = (ball.rect.bottomleft[0] - self.game.paddle.rect.topleft[0],\n -ball.rect.height)\n ball.anchor(self.game.paddle, pos)", "def mouseReleaseEvent(self, ev):\n super(PlotObject, self).mouseReleaseEvent(ev)\n if self._downpos == ev.pos():\n x = ev.pos().x()\n y = ev.pos().y()\n if ev.button() == 2 :\n self.mPosition()\n elif ev.button() == 1:\n x = x - self.width() / 2\n y = y - self.height() / 2\n #self.pan(-x, -y, 0, relative=True)\n print(self.opts['center'])\n print(x,y)\n self._prev_zoom_pos = None\n self._prev_pan_pos = None", "def find_paddle(grid):\n for x in range(X_COLS):\n if grid[x][CURSOR_ROW] == 3:\n paddle_x = x\n\n return paddle_x", "def updatePaddle(self, selfinput):\n assert isinstance(selfinput,GInput)\n position = 0\n \n if selfinput.is_key_down('right'):\n position = 5\n if selfinput.is_key_down('left'):\n position = -5\n \n self._paddle.move(position)", "def update(self):\n\n\t\tself.x = games.mouse.x\n\t\tself.y = games.mouse.y\n\t\tself.check_collide()", "def update(self):\r\n if self.right > games.screen.width or self.left < 0:\r\n self.dx = -self.dx\r\n \r\n if self.bottom > games.screen.height or self.top < 0:\r\n self.dy = -self.dy", "def onMouseMove(self,mouseEvent):\n\t\tself.canvas.drawEdgeTo(mouseEvent.x,mouseEvent.y)", "def shorten_paddle_exec(self):\n if self.shorten_paddle_count == 0 and self.glitch_count == 1:\n self.window.remove(self.paddle)\n self.paddle = GRect(self.paddle_width-20, self.paddle_height, x=(self.window_width - self.paddle_width) / 2,\n y=self.window_height - self.paddle_offset)\n self.paddle.color = 'magenta'\n self.paddle.filled = True\n self.paddle.fill_color = 'magenta'\n self.window.add(self.paddle)\n self.glitch_count += 1\n elif 0 < self.shorten_paddle_count <= 5:\n pass\n elif self.shorten_paddle_count > 5:\n self.window.remove(self.paddle)\n self.paddle = GRect(self.paddle_width, self.paddle_height, x=(self.window_width - self.paddle_width) / 2,\n y=self.window_height - self.paddle_offset)\n self.paddle.color = 'black'\n self.paddle.filled = True\n self.paddle.fill_color = 'black'\n self.window.add(self.paddle)\n self.shorten_paddle_count = 0\n self.shorten_paddle_exist = False\n self.shorten_paddle_start = False\n self.glitch_count = 1", "def mouseMoveEvent(self, event):\n\n margin = self.geometry.height()/6\n # The formula below makes the topmost y-coordinate 100, and the bottommost y-coordinate 0.\n \n if event.y() < (5*self.geometry.height()/6) and event.y() > (self.geometry.height()/6):\n if 
event.x() > (self.geometry.width()/2-150) and event.x() < (self.geometry.width()/2+25):\n self.mouseY = round(100/(margin-(self.geometry.height()-margin)) * (event.y()-margin) + 100)\n #self.mouseY = (100/(margin-(self.geometry.height()-margin)) * (event.y()-margin) + 100)\n\n self.numLabel.setText(str(self.mouseY))", "def leave_win_game(self):\n self.end = True\n self.canevas.config(bg='black')\n self.canevas.itemconfig(self.ball.ball, fill='black')\n self.canevas.itemconfig(self.paddle.paddle, fill='black')\n self.canevas.update()\n time.sleep(2)\n self.canevas.config(bg='light blue')\n self.canevas.itemconfig(self.ball.ball, fill='red')\n self.canevas.itemconfig(self.paddle.paddle, fill='grey')\n self.brick.next_level()", "def on_mouse_press(self, x, y, button, modifiers):\n self.add_wall()", "def CheckEdgeDrop(window, docks, pt):\r\n\r\n screenPt = window.ClientToScreen(pt)\r\n clientSize = window.GetClientSize()\r\n frameRect = GetInternalFrameRect(window, docks)\r\n\r\n if screenPt.y >= frameRect.GetTop() and screenPt.y < frameRect.GetBottom():\r\n if pt.x < auiLayerInsertOffset and pt.x > auiLayerInsertOffset - auiLayerInsertPixels:\r\n return wx.LEFT\r\n \r\n if pt.x >= clientSize.x - auiLayerInsertOffset and \\\r\n pt.x < clientSize.x - auiLayerInsertOffset + auiLayerInsertPixels:\r\n return wx.RIGHT\r\n \r\n if screenPt.x >= frameRect.GetLeft() and screenPt.x < frameRect.GetRight():\r\n if pt.y < auiLayerInsertOffset and pt.y > auiLayerInsertOffset - auiLayerInsertPixels:\r\n return wx.TOP\r\n \r\n if pt.y >= clientSize.y - auiLayerInsertOffset and \\\r\n pt.y < clientSize.y - auiLayerInsertOffset + auiLayerInsertPixels:\r\n return wx.BOTTOM\r\n\r\n return -1", "def check_borders(self):\n # Go Homer!\n # https://en.wikipedia.org/wiki/Torus#Flat_torus\n if self._posn.x < 0:\n self._posn.x += self._win_w\n elif self._posn.x > self._win_w:\n self._posn.x -= self._win_w\n if self._posn.y < 0:\n self._posn.y += self._win_h\n elif self._posn.y > self._win_h:\n self._posn.y -= self._win_h", "def create_paddle(self, pos):\n\n self.shape(\"square\")\n self.penup()\n self.color(\"blue\")\n self.shapesize(stretch_wid=1, stretch_len=4)\n self.setpos(pos)", "def game_over(self):\n gameover = GLabel('GAME OVER :(')\n gameover.font = '-50'\n self.window.add(gameover,x=self.window.width/6,y=self.window.height*0.666)", "def MouseClick(event):\r\n global player\r\n global winner\r\n Window.focus_set()\r\n x = event.x // 100 # convertit une coordonée pixel écran en coord grille de jeu\r\n y = event.y // 100\r\n if ( (x<0) or (x>2) or (y<0) or (y>2) ) : return\r\n \r\n print(\"clicked at\", x,y)\r\n hasPlay = Play(x,y,player) # on regarde si le joueur a jouer correctement\r\n if hasPlay:\r\n newPlayer() # dans ce cas là on change de joueur \r\n winner = Victoire()\r\n if (winner or MatchNul()):\r\n Dessine(winner)\r\n Window.update()\r\n Window.after(3000)\r\n ResetGame(winner)\r\n Dessine(winner)\r\n return\r\n Dessine(winner)\r\n if hasPlay: # si le joueur a bien joué, alors c'est au tour de l'ia\r\n Window.update()\r\n Window.after(3000)\r\n thisIsIA()", "def mouse_left_up(self):\n pass", "def detectWallCollision(self):\n if self.right >= GAME_WIDTH or self.left <= 0:\n self._vx = -1.0 * self._vx\n if self.top >= GAME_HEIGHT:\n self._vy = -1.0 * self._vy", "def update(self):\r\n if self.right > games.screen.width: #or self.left < 0:\r\n self.dx = -self.dx\r\n\r\n if self.bottom > games.screen.height or self.top < 0:\r\n self.dy = -self.dy\r\n\r\n self.handle_collide()\r\n\r\n if self.left < 
60:\r\n self.end_game()", "def leftButtonUp(self):\n\t\tautopy.mouse.toggle(False,autopy.mouse.LEFT_BUTTON)", "def drawPaddle(self, view):\n self._paddle.draw(view)", "def grab(self, event):\n self.ypos = event.y\n self.xpos = event.x\n self.config(cursor='fleur')", "def follow_mouse(self, mouse):\n half_width = self.width() / 2\n self.left = mouse.get_x() - half_width\n self.right = mouse.get_x() + half_width", "def resetPaddle(self):\n self._paddle.setX(GAME_WIDTH/2)\n self._paddle.setSpeed(PADDLE_SPEED)", "def check_pos(self, x, y):\n if x >= WINDOWWIDTH or y >= WINDOWHEIGHT or x <=0 or y <= 0:\n return True", "def on_mouse_click(self, event):\n if not self.is_game_over:\n try:\n # i, j coordinates of the click event\n i = int(round(event.ydata))\n j = int(round(event.xdata))\n\n # Left button\n if event.button == 1 or event.button == 2:\n self.reveal(i, j)\n\n # Right button\n elif event.button == 3:\n self.flag(i, j)\n\n except (TypeError, IndexError):\n pass", "def update(self):\n # Get the current mouse position. This returns the position\n # as a list of two numbers.\n pos = pygame.mouse.get_pos()\n \n # Set the player x position to the mouse x position\n self.rect.x = pos[0]", "def jump(self):\r\n if self.grounded == True:\r\n self.vel.y = -13", "def update(self):\n if self.right > games.screen.width or self.left < 0:\n self.dx = -self.dx\n if self.top < 0 or self.bottom > games.screen.height:\n self.dy = -self.dy", "def move_inward_outward(self):\r\n\r\n if self.movement == \"inward_outward\" and self.flag_move:\r\n leftPos, topPos, rightPos, bottomPos = self.canvas.coords(self.ball)\r\n if self.size_flag:\r\n self.change_size(\"larger\")\r\n elif not self.size_flag:\r\n self.change_size(\"smaller\")\r\n # If the ball hits a wall, change inward to outward.\r\n if leftPos <= 0 or rightPos >= 400 or topPos <= 0 or bottomPos >= 400:\r\n self.size_flag = 0\r\n # If the ball size reaches 1, change outward to inward.\r\n elif self.size == 1:\r\n self.size_flag = 1\r\n self.canvas.after(50, self.move_inward_outward)", "def update(self):\n # Get the current mouse position. 
This returns the position\n # as a list of two numbers.\n pos = pygame.mouse.get_pos()\n\n # Set the player x position to the mouse x position\n self.rect.x = pos[0]", "def go_left(self):\n self.rect.centerx -= 9", "def handle_mouse(obj, event):\n if event:\n x = event.globalX()\n y = event.globalY()\n x_w = obj.offset.x()\n y_w = obj.offset.y()\n obj.move(x - x_w, y - y_w)", "def __init__(self, x, y):\r\n super(paddle, self).__init__(image=paddle.paddle2, x=x, y=y)\r\n self.points=games.Text(value=0, size=50, color=color.white, top=5, right=games.screen.width-5)\r\n games.screen.add(self.points)", "def move( self, event ):\n self.lastMotion = time()\n if self.follow == False: # If the follow flag is not set, motion within the widget will make the ToolTip dissapear\n self.withdraw()\n self.visible = 1\n\n root = self.parent\n root = self.parent\n\n # parent_name = self.winfo_parent()\n # root = self._nametowidget(parent_name)\n\n \n # pa = re.split(r'(\\D)', root.geometry())\n # pt = re.split(r'(\\D)', self.geometry())\n #pm = re.split(r'(\\D)', self.master.geometry())\n #print \"root: \", pa\n #print \"tool: \", self.geometry()\n #print \"pm: \", self.wdgt.geometry()\n #print \"mouse: \", event.x_root, event.y_root\n #print \"mouser: \", event.x, event.y\n \n xCan = event.x_root - self.parent.winfo_rootx()\n yCan = event.y_root - self.parent.winfo_rooty()\n #print \"mouser2: \", xCan, yCan\n \n \n \n #if pa[5] == '-':\n # limit_x = int(pa[0]) - int(pa[6]) \n # print \"minus\"\n #else:\n #limit_x = int(pa[0]) + int(pa[4]) \n #if root.state() == 'zoomed':\n # limit_x = int(pa[0])\n #print \"lim: \", limit_x\n \n self.geometry( '+%i+%i' % ( event.x_root+10, event.y_root+10 ) ) # Offset the ToolTip 10x10 pixes southwest of the pointer\n \n # if xCan > (limit_x-int(pt[0])):\n # #print \"xxx\"\n # self.geometry( '+%i+%i' % ( event.x_root-int(pt[0]), event.y_root+10 ) ) # Offset the ToolTip 10x10 pixes southwest of the pointer\n # else:\n # self.geometry( '+%i+%i' % ( event.x_root+10, event.y_root+10 ) ) # Offset the ToolTip 10x10 pixes southwest of the pointer\n # try:\n # self.msgVar.set( self.msgFunc() ) # Try to call the message function. 
Will not change the message if the message function is None or the message function fails\n # except:\n # pass\n self.after( int( self.delay * 1000 ), self.show )", "def check_boundaries(self):\n # Checks if the enemy bar has gone of the net\n if self.rect.left <= self.settings.WINDOW_WIDTH / 2:\n self.rect.left = self.settings.WINDOW_WIDTH / 2\n self.isMovingUp = False\n\n # Checks if the enemy bar has gone out of bound to the right\n if self.rect.right >= self.settings.WINDOW_WIDTH:\n self.rect.right = self.settings.WINDOW_WIDTH\n self.isMovingUp = True", "def _move_our_paddle(self, action) -> None:\n if not isinstance(action, int):\n action = action.item() # pops the item if the action is a single tensor\n assert action in [a for a in self.action_meanings.keys()], f\"{action} is not a valid action\"\n if action == self.actions['UP']:\n if self.paddle_r.top_bound < self.top_bound:\n self.paddle_r.up()\n elif action == self.actions['DOWN']:\n if self.paddle_r.bottom_bound > self.bottom_bound:\n self.paddle_r.down()", "def move(self):\n self.val = (pygame.mouse.get_pos()[\n 0] - self.xpos - 10) / 80 * (self.maxi - self.mini) + self.mini\n if self.val < self.mini:\n self.val = self.mini\n if self.val > self.maxi:\n self.val = self.maxi", "def on_mouse_move(self, win, xpos, ypos):\n old = self.mouse\n self.mouse = (xpos, glfw.get_window_size(win)[1] - ypos)\n if glfw.get_mouse_button(win, glfw.MOUSE_BUTTON_LEFT):\n self.drag(old, self.mouse, glfw.get_window_size(win))\n if glfw.get_mouse_button(win, glfw.MOUSE_BUTTON_RIGHT):\n self.pan(old, self.mouse)", "def on_mouse_move(self, win, xpos, ypos):\n old = self.mouse\n self.mouse = (xpos, glfw.get_window_size(win)[1] - ypos)\n if glfw.get_mouse_button(win, glfw.MOUSE_BUTTON_LEFT):\n self.drag(old, self.mouse, glfw.get_window_size(win))\n if glfw.get_mouse_button(win, glfw.MOUSE_BUTTON_RIGHT):\n self.pan(old, self.mouse)", "def mouse_on_button(self, mouse) -> bool:\n return self.x + self.w > mouse[0] > self.x and self.y + self.h > mouse[1] > self.y", "def updatePaddle(self, Input):\n assert isinstance(Input,GInput)\n change = 0\n if Input.is_key_down('left'):\n if self._paddle.getX() >= (self._paddle.getWidth()/2):\n change -= self._paddle.getSpeed()\n if Input.is_key_down('right'):\n if self._paddle.getX() <= (GAME_WIDTH - (self._paddle.getWidth()/2)):\n change += self._paddle.getSpeed()\n self._paddle.setX(self._paddle.getX() + change)", "def keepInBounds(self):\n screenWidth, screenHeight = self.screen.get_size()\n\n self.pos.x = max(0, self.pos.x)\n self.pos.x = min(screenWidth, self.pos.x)\n\n self.pos.y = max(0, self.pos.y)\n self.pos.y = min(screenHeight, self.pos.y)", "def getPaddle(self):\n return Paddle()", "def Haut():\r\n X1, Y1, X2, Y2 = canvas.coords(boule)\r\n canvas.coords(boule,X1,Y1-20,X2,Y2-20)", "def callback_handle_left_mouse_motion(self, event):\n\n # TODO: update this for the case where there is no current shape id\n vector_object = self.get_vector_object(self.variables.current_shape_id)\n if self.variables.active_tool == TOOLS.PAN_TOOL:\n x_dist = event.x - self.variables.tmp_anchor_point[0]\n y_dist = event.y - self.variables.tmp_anchor_point[1]\n self.move(self.variables.image_id, x_dist, y_dist)\n self.variables.tmp_anchor_point = event.x, event.y\n elif self.variables.active_tool == TOOLS.TRANSLATE_SHAPE_TOOL:\n x_dist = event.x - self.variables.tmp_anchor_point[0]\n y_dist = event.y - self.variables.tmp_anchor_point[1]\n t_coords = self.get_shape_canvas_coords(self.variables.current_shape_id)\n new_coords = 
numpy.asarray(t_coords) + x_dist\n new_coords_y = numpy.asarray(t_coords) + y_dist\n new_coords[1::2] = new_coords_y[1::2]\n if vector_object.image_drag_limits:\n canvas_limits = self.image_coords_to_canvas_coords(vector_object.image_drag_limits)\n x_vertices = new_coords[0::2]\n y_vertices = new_coords[1::2]\n within_x_limits = True\n within_y_limits = True\n for x_vertex in x_vertices:\n if canvas_limits[2] < x_vertex or x_vertex < canvas_limits[0]:\n within_x_limits = False\n for y_vertex in y_vertices:\n if y_vertex < canvas_limits[1] or y_vertex > canvas_limits[3]:\n within_y_limits = False\n if not within_x_limits:\n new_coords[0::2] = t_coords[0::2]\n if not within_y_limits:\n new_coords[1::2] = t_coords[1::2]\n self.modify_existing_shape_using_canvas_coords(self.variables.current_shape_id,\n new_coords,\n update_pixel_coords=True)\n self.variables.tmp_anchor_point = event.x, event.y\n elif self.variables.active_tool == TOOLS.EDIT_SHAPE_COORDS_TOOL:\n previous_coords = self.get_shape_canvas_coords(self.variables.current_shape_id)\n coord_x_index = self.variables.tmp_closest_coord_index*2\n coord_y_index = coord_x_index + 1\n new_coords = list(previous_coords)\n new_coords[coord_x_index] = event.x\n new_coords[coord_y_index] = event.y\n if vector_object.image_drag_limits:\n drag_x_lim_1, drag_y_lim_1, drag_x_lim_2, drag_y_lim_2 = \\\n self.image_coords_to_canvas_coords(vector_object.image_drag_limits)\n if new_coords[coord_x_index] < drag_x_lim_1:\n new_coords[coord_x_index] = drag_x_lim_1\n if new_coords[coord_x_index] > drag_x_lim_2:\n new_coords[coord_x_index] = drag_x_lim_2\n if new_coords[coord_y_index] < drag_y_lim_1:\n new_coords[coord_y_index] = drag_y_lim_1\n if new_coords[coord_y_index] > drag_y_lim_2:\n new_coords[coord_y_index] = drag_y_lim_2\n\n self.modify_existing_shape_using_canvas_coords(self.variables.current_shape_id, tuple(new_coords))\n elif self.variables.active_tool == TOOLS.ZOOM_IN_TOOL:\n self.event_drag_line(event)\n elif self.variables.active_tool == TOOLS.ZOOM_OUT_TOOL:\n self.event_drag_line(event)\n elif self.variables.active_tool == TOOLS.SELECT_TOOL:\n self.event_drag_line(event)\n elif self.variables.active_tool == TOOLS.DRAW_RECT_BY_DRAGGING:\n self.event_drag_line(event)\n elif self.variables.active_tool == TOOLS.DRAW_ELLIPSE_BY_DRAGGING:\n self.event_drag_line(event)\n elif self.variables.active_tool == TOOLS.DRAW_LINE_BY_DRAGGING:\n self.event_drag_line(event)\n elif self.variables.active_tool == TOOLS.DRAW_ARROW_BY_DRAGGING:\n self.event_drag_line(event)\n elif self.variables.active_tool == TOOLS.DRAW_POINT_BY_CLICKING:\n self.modify_existing_shape_using_canvas_coords(self.variables.current_shape_id, (event.x, event.y))", "def wrap(self):\n if self.center.x > SCREEN_WIDTH:\n self.center.x = 0\n if self.center.y > SCREEN_HEIGHT:\n self.center.y = 0\n if self.center.x < 0:\n self.center.x = SCREEN_WIDTH\n if self.center.y < 0:\n self.center.y = SCREEN_HEIGHT", "def shorten_paddle_switch(self):\n for i in range(self.paddle.width):\n maybe_shorten = self.window.get_object_at(self.paddle.x + i, self.paddle.y)\n if maybe_shorten is self.shorten_paddle:\n self.window.remove(maybe_shorten)\n self.shorten_paddle_start = True", "def sidebounce(self):\r\n self.dx=-self.dx", "def mouse_middle_up(self):\n pass", "def update(self):\n pygame.event.pump()\n self.pos_x -= 1.5", "def start_game(self,event):\n if self.__dx == 0 and self.__dy == 0:\n self.set_velocity()\n self.window.remove(self.start_label)\n self.window.remove(self.double_score_label)\n 
self.window.remove(self.shorten_paddle_label)\n else:\n pass", "def mouseDragged():\n if mousePressed:\n mousePressed()", "def check_contained(self,x,y):\n if self.active:\n self.reset()\n #if in horizontal bounds\n if x > self.left and x < self.right:\n slope = 1/sqrt(3)\n #use to set create verticle bounds\n if x - self.center_x <= 0:\n slope *= -1\n\n ################\n x_rel = x - self.center_x #bounds depends on x location of the mouse \n bottom_bound = self.bottom - (x_rel*slope)\n top_bound = self.top - (x_rel*-slope)\n ################\n\n if y >= top_bound and y <= bottom_bound:\n if Ctrl_Vars.Left_MouseDown:\n self.press() # if all conditions are met use functionality", "def gameover(self):\n self.Display.blit(self.GameOver,[205-self.GameOver.get_width()//2,150-self.GameOver.get_height()//2])\n pygame.draw.rect(self.Display,[200,97,48],(60,230,290,70))\n pygame.draw.rect(self.Display,[200,97,48],(60,320,290,70))\n pygame.draw.rect(self.Display,[106,106,150],(65,235,280,60))\n pygame.draw.rect(self.Display,[106,106,150],(65,325,280,60))\n pygame.display.update()\n self.Display.blit(self.Newgame,[205-self.Newgame.get_width()//2,265-self.Newgame.get_height()//2])\n self.Display.blit(self.Exit,[205-self.Exit.get_width()//2,355-self.Exit.get_height()//2])\n pygame.display.update()\n while True:\n x,y=pygame.mouse.get_pos()\n if x>64 and x<345:\n if y>234 and y<295:\n pygame.draw.rect(self.Display,[200,97,48],(65,235,280,60))\n self.Display.blit(self.Newgame,[205-self.Newgame.get_width()//2,265-self.Newgame.get_height()//2])\n else:\n pygame.draw.rect(self.Display,[106,106,150],(65,235,280,60))\n self.Display.blit(self.Newgame,[205-self.Newgame.get_width()//2,265-self.Newgame.get_height()//2])\n if y>324 and y<386:\n pygame.draw.rect(self.Display,[200,97,48],(65,325,280,60))\n self.Display.blit(self.Exit,[205-self.Exit.get_width()//2,355-self.Exit.get_height()//2])\n else:\n pygame.draw.rect(self.Display,[106,106,150],(65,325,280,60))\n self.Display.blit(self.Exit,[205-self.Exit.get_width()//2,355-self.Exit.get_height()//2])\n pygame.display.update()\n for event in pygame.event.get():\n if event.type==pygame.QUIT:\n self.Game.dontSave()\n return\n elif event.type==pygame.MOUSEBUTTONDOWN:\n x,y=pygame.mouse.get_pos()\n if x>64 and x<345:\n if y>234 and y<296:\n return 'newgame'\n elif y>324 and y<386:\n self.Game.dontSave()\n return", "def update(self):\n self.on_left = (abs(self.x - self.maze.LEFT_VERT) <\n self.WALL_TOLERANCE)\n self.on_right = (abs(self.x - self.maze.RIGHT_VERT) <\n self.WALL_TOLERANCE)\n\n self.on_top = (abs(self.y - self.maze.TOP_HORIZ) <\n self.WALL_TOLERANCE)\n self.on_bottom = (abs(self.y - self.maze.BOTTOM_HORIZ) <\n self.WALL_TOLERANCE)\n self.x = self.x + self.x_add\n self.y = self.y + self.y_add\n\n # If the game is over and Pacman wins, stop moving\n if self.gc.player_wins:\n self.x_add = 0\n self.y_add = 0\n # If the game is over and Pacman loses, top moving and\n # Open mouth to oblivion.\n elif self.gc.pinky_wins:\n self.open_close = 1\n self.x_add = 0\n self.y_add = 0\n self.mouth_angle = self.mouth_angle + self.open_close\n else:\n if self.mouth_angle < 45:\n self.open_close = self.mouth_speed\n self.mouth_angle = self.mouth_angle + self.open_close\n if self.mouth_angle < 0:\n self.open_close = - self.mouth_speed\n self.mouth_angle = self.mouth_angle + self.open_close\n else:\n while self.mouth_angle > 0:\n self.open_close = - self.mouth_speed\n self.mouth_angle = self.mouth_angle + self.open_close\n self.eat_dots()", "def stay_on_screen(self):\n if 
self.x <= 0 + SHIP_WIDTH/2:\n self.x += SHIP_MOVEMENT\n if self.x >= GAME_WIDTH- SHIP_WIDTH/2:\n self.x -= SHIP_MOVEMENT", "def __init__(self):\n \n self._wall = BrickWall() \n self._paddle = GRectangle(\n x=GAME_WIDTH/2 - PADDLE_WIDTH/2,\n y=PADDLE_OFFSET,\n width=PADDLE_WIDTH,\n height=PADDLE_HEIGHT,\n fillcolor = PADDLE_COLOR)\n self._clickdist = 0\n self._ball = Ball() \n self._last = None\n self._tries = 2\n self._lostlife = False", "def leftButtonDown(self):\n\t\tautopy.mouse.toggle(True,autopy.mouse.LEFT_BUTTON)", "def FlyOut(self):\r\n\r\n if self._fly_timer.IsRunning():\r\n return\r\n\r\n if _VERSION_STRING < \"2.9\":\r\n leftDown = wx.GetMouseState().LeftDown()\r\n else:\r\n leftDown = wx.GetMouseState().LeftIsDown()\r\n\r\n if leftDown:\r\n return\r\n \r\n rect = wx.Rect(*self.GetScreenRect())\r\n rect.Inflate(10, 10)\r\n\r\n if rect.Contains(wx.GetMousePosition()):\r\n if not self._fly:\r\n return\r\n self._send_size = False\r\n self._fly_timer.Start(5)\r\n else:\r\n if self._fly:\r\n return\r\n self._send_size = False\r\n self._fly_timer.Start(5)", "def nail_in(self):\n if not self.in_wall:\n self.in_wall = True" ]
[ "0.8009978", "0.70204043", "0.70202464", "0.693563", "0.66550136", "0.6435849", "0.6397138", "0.6362361", "0.63407254", "0.63133556", "0.6291798", "0.6267906", "0.6251655", "0.6229885", "0.61976296", "0.6141946", "0.6129163", "0.6072724", "0.6051235", "0.6037957", "0.59937733", "0.5985066", "0.5975724", "0.5958349", "0.5957113", "0.5918765", "0.5862096", "0.58347577", "0.58176565", "0.58042675", "0.5793452", "0.5772719", "0.57651687", "0.5741006", "0.5736085", "0.5733998", "0.5700545", "0.5696404", "0.56896037", "0.56880414", "0.5680297", "0.56761205", "0.56626844", "0.5658738", "0.5637195", "0.5636999", "0.56278455", "0.5611031", "0.55886596", "0.55882615", "0.5587091", "0.558538", "0.55715144", "0.5566269", "0.5562769", "0.5554452", "0.55458474", "0.5538491", "0.5517872", "0.55091745", "0.5500781", "0.54869854", "0.5481534", "0.5477606", "0.5472381", "0.5469632", "0.54622746", "0.5452784", "0.5450269", "0.54454356", "0.54322886", "0.5429528", "0.54290456", "0.5415826", "0.54155934", "0.54147", "0.54092616", "0.539881", "0.539881", "0.5397194", "0.53927004", "0.5375059", "0.5371489", "0.5371295", "0.5368306", "0.5368005", "0.53604823", "0.5359062", "0.5356148", "0.53532535", "0.5352579", "0.5349883", "0.53455013", "0.5345258", "0.5343139", "0.5337954", "0.53371394", "0.53192276", "0.53173333", "0.53131026" ]
0.78187925
1
The first click (or revived click) starts the game, and the others have no impact. onmouseclicked(self.click_check)
def click_check(self, mouse):
    self.cc += 1
    return self.cc
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def start_game():\n logger.info(\"Clicking play button\")\n mouseclick(coords_play_final_button[0], coords_play_final_button[1])", "def play(self, event):\n if self.num_clicks == 1:\n self.clickable(event)\n if len(self.canvas.find_withtag(\"selected\")) == 2:\n self.num_of_tries += 1\n print(f'Number of tries {self.num_of_tries}')\n if self.num_of_tries > 13:\n self.score -= 10\n self.score_label.config(text=f'Score: {self.score}')\n self.check_match(self.click_tiles)\n self.canvas.after(self.delay, self.flip_back)\n self.click_tiles.clear()\n self.num_clicks = 0\n else:\n self.clickable(event)", "def primeiro_click(self):\n self.timer += self.game.janela.delta_time()\n clicked = self.mouse.is_button_pressed(1)\n if self.timer >= self.min_time and clicked:\n mouse_x, mouse_y = self.mouse.get_position()\n if mouse_x > self.running.largura_tabuleiro or mouse_y < self.running.top_bar: return\n self.selecionar(mouse_x, mouse_y)\n self.running.current_state = Running_Select_2(self.game, self.running)\n return", "def game_click(coord):\n mouseclick(coord[0], coord[1])\n time.sleep(0.5)", "def on_mouse_click(self, event):\n if not self.is_game_over:\n try:\n # i, j coordinates of the click event\n i = int(round(event.ydata))\n j = int(round(event.xdata))\n\n # Left button\n if event.button == 1 or event.button == 2:\n self.reveal(i, j)\n\n # Right button\n elif event.button == 3:\n self.flag(i, j)\n\n except (TypeError, IndexError):\n pass", "def first_move(self, first_click_tile):\r\n\r\n self.is_new_game = False\r\n self.board.first_click(first_click_tile)\r\n self.timer.init_clock()", "def click(self, event):\n\t\t\n\t\t# Get play (mouse click)\n\t\tx, y = int(event.x/self.squareSize), int(event.y/self.squareSize)\n\t\t# If the square clicked is empty, does a movement\n\t\tif(self.board[x][y] == 0 and self.isMyTurn):\n\t\t\tself.board[x][y] = self.player\n\t\t\tself.draw(x,y, self.player)\n\t\t\tself.sock.send(bytes('%d%d'%(x,y), encoding='utf-8'))\n\t\t\tself.isMyTurn = False", "def on_click(self, evt):\n obj = self.scene.mouse.pick\n if obj == self.start_button_local:\n self.started = True\n return 'local'\n elif obj == self.start_button_remote and self.remote_enabled:\n self.started = True\n return 'remote'", "def on_click(self) -> None:\n self.cycle()", "def segundo_click(self):\n self.timer += self.game.janela.delta_time()\n clicked = self.mouse.is_button_pressed(1)\n if self.timer >= self.min_time and clicked:\n self.timer = 0\n mouse_x, mouse_y = self.mouse.get_position()\n if mouse_x > self.running.largura_tabuleiro or mouse_y < self.running.top_bar:\n self.deselecionar()\n self.running.current_state = Running_No_Select(self.game, self.running)\n return\n self.running.segundo_tile_ = [mouse_x // self.running.x_space, (mouse_y - self.running.top_bar ) // self.running.y_space]\n if self.running.primeiro_tile_ == self.running.segundo_tile_:\n self.deselecionar()\n self.running.current_state = Running_No_Select(self.game, self.running)\n return\n vizinhos = self.lista_de_vizinhos(self.running.primeiro_tile_)\n for v in vizinhos:\n if self.running.segundo_tile_ == v:\n self.swap()\n return\n self.selecionar()\n return", "def check_game_mode_button(self, mouse_x, mouse_y):\r\n for button in self._play_mode_button_list:\r\n if button.get_button_rect().collidepoint(mouse_x, mouse_y):\r\n button_clicked = button\r\n break\r\n else:\r\n button_clicked = None\r\n\r\n if button_clicked is not None and \\\r\n self._stats.get_status() == \"Start_game\":\r\n 
self.start_game(button_clicked.get_num_atom())", "def check_events(self):\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n exit()\n if event.type == pygame.MOUSEBUTTONDOWN:\n if event.button == 1:\n position = pygame.mouse.get_pos()\n self.left_click(position)", "def on_mouse_press(self, x, y, button, modifiers):\n\n # Change states as needed.\n if self.current_state == GAME_RUNNING_PAGE:\n pass\n else:\n # Restart the game.\n self.setup()\n self.score=0\n self.current_state = GAME_RUNNING_PAGE", "def MouseClick(event):\r\n global player\r\n global winner\r\n Window.focus_set()\r\n x = event.x // 100 # convertit une coordonée pixel écran en coord grille de jeu\r\n y = event.y // 100\r\n if ( (x<0) or (x>2) or (y<0) or (y>2) ) : return\r\n \r\n print(\"clicked at\", x,y)\r\n hasPlay = Play(x,y,player) # on regarde si le joueur a jouer correctement\r\n if hasPlay:\r\n newPlayer() # dans ce cas là on change de joueur \r\n winner = Victoire()\r\n if (winner or MatchNul()):\r\n Dessine(winner)\r\n Window.update()\r\n Window.after(3000)\r\n ResetGame(winner)\r\n Dessine(winner)\r\n return\r\n Dessine(winner)\r\n if hasPlay: # si le joueur a bien joué, alors c'est au tour de l'ia\r\n Window.update()\r\n Window.after(3000)\r\n thisIsIA()", "def handle_left_click(self):\n if not self.game_in_progress:\n return\n if self.first_click:\n self.first_click = False\n self.timer.start(1000)\n sender = self.sender()\n row = 0\n col = 0\n for row in range(self.rows):\n for col in range(self.cols):\n if self.button_array[row][col] == sender:\n break\n else:\n continue\n break\n # print 'Received left click:', row, ',', col\n celllist = self.board.opencell(row, col)\n if celllist == []:\n return\n for cell in celllist:\n row = cell[0]\n col = cell[1]\n cell_property = self.board.getcellproperty(row, col)\n if cell_property == CellProperty.Empty:\n self.button_array[row][col].setIcon(QtGui.QIcon(\"icons/OpenedSquare.png\"))\n elif cell_property == CellProperty.Mine:\n # Game over\n for row in range(self.rows):\n for col in range(self.cols):\n cell_property = self.board.getcellproperty(row, col)\n if cell_property == CellProperty.Mine:\n self.button_array[row][col].setIcon(QtGui.QIcon(\"icons/mine.ico\"))\n self.status_button.setIcon(QtGui.QIcon(\"icons/smiley3.ico\"))\n self.game_in_progress = False\n self.timer.stop()\n return\n elif cell_property == CellProperty.MineCountOne:\n self.button_array[row][col].setIcon(QtGui.QIcon(\"icons/1.png\"))\n elif cell_property == CellProperty.MineCountTwo:\n self.button_array[row][col].setIcon(QtGui.QIcon(\"icons/2.png\"))\n elif cell_property == CellProperty.MineCountThree:\n self.button_array[row][col].setIcon(QtGui.QIcon(\"icons/3.png\"))\n elif cell_property == CellProperty.MineCountFour:\n self.button_array[row][col].setIcon(QtGui.QIcon(\"icons/4.png\"))\n elif cell_property == CellProperty.MineCountFive:\n self.button_array[row][col].setIcon(QtGui.QIcon(\"icons/5.png\"))\n elif cell_property == CellProperty.MineCountSix:\n self.button_array[row][col].setIcon(QtGui.QIcon(\"icons/6.png\"))\n elif cell_property == CellProperty.MineCountSeven:\n self.button_array[row][col].setIcon(QtGui.QIcon(\"icons/7.png\"))\n elif cell_property == CellProperty.MineCountEight:\n self.button_array[row][col].setIcon(QtGui.QIcon(\"icons/8.png\"))\n\n game_status = self.board.continuegame()\n print 'Game Status:', game_status\n if game_status == GameStatus.GameWon:\n self.timer.stop()\n self.game_in_progress = False\n player_name = QtGui.QInputDialog.getText(self, 
\"Name Please !!\",\\\n \"Enter your name for leader board:\")\n # TODO: Replace 1 with the time taken by the end user.\n LeaderBoard.insertnewscore(CURRENT_GAME_LEVEL, player_name[0], self.time)\n self.status_button.setIcon(QtGui.QIcon(\"icons/smiley.ico\"))\n print \"You have won the game\"", "def LeftClick(self):\n self._PressLeftButton()\n self._ReleaseAllButtons()", "def __check_if_got_pressed(self):\n mouse_x_pos,mouse_y_pos = pg.mouse.get_pos()\n\n if utilitiez.on_object(self.rect.x, self.rect.y, self.rect.width, self.rect.height, mouse_x_pos, mouse_y_pos,\n MOUSE_WIDTH, MOUSE_HEIGHT):\n self.__on_click()", "def wait_for_start(self):\n while True:\n ev = self.scene.waitfor('click')\n game_type = self.on_click(ev)\n if game_type:\n return game_type", "def on_run_clicked(self):\n self.start_threading()\n self.stepping = False\n self.step_event.set()", "def input(self, event: pygame.event) -> None:\n if event.type == pygame.MOUSEBUTTONDOWN and event.button == 1:\n self.user_clicked = True", "def click(self):\r\n pass", "def check_events(self):\r\n for event in pygame.event.get():\r\n if event.type == pygame.QUIT:\r\n sys.exit()\r\n elif event.type == pygame.MOUSEBUTTONDOWN:\r\n mouse_x, mouse_y = pygame.mouse.get_pos()\r\n if self._stats.get_status() == \"Start_game\":\r\n self.check_game_mode_button(mouse_x, mouse_y)\r\n elif self._stats.get_status() == \"replay\":\r\n self.check_replay_button(mouse_x, mouse_y)\r\n else:\r\n self.check_click(mouse_x, mouse_y)", "def on_mouse_press(self, x, y, button, modifiers):\n\n # Change states as needed.\n if self.current_state == INSTRUCTIONS_PAGE:\n # Next page of instructions.\n self.current_state = GAME_RUNNING\n # Start the game\n self.setup()\n self.current_state = GAME_RUNNING\n elif self.current_state == GAME_OVER:\n # Restart the game.\n self.setup()\n self.current_state = GAME_RUNNING", "def check_play_button(ai_settings, screen, stats, sb, play_button, ship, aliens, bullets, mouse_x, mouse_y):\n button_clicked = play_button.rect.collidepoint(mouse_x, mouse_y)\n if button_clicked and not stats.game_active:\n ai_settings.initialize_dynamic_settings()\n #hiding mouse cursor\n start_game(ai_settings, screen, stats, ship, aliens, bullets)\n\n sb.prep_score()\n sb.prep_high_score()\n sb.prep_level()\n sb.prep_ships()", "def checkClick(self, pos):\r\n self.dice.checkClick(pos)\r\n self.turnchanger.checkClick(pos)\r\n for point in self.points:\r\n point.checkClick(pos)", "def press(self):\n self.clicked = True\n if self.command:\n self.command(self.name)", "def check_clicked(self, events):\n x = self.x\n y = self.y\n xsize = self.xsize\n ysize = self.ysize\n (a, b) = pygame.mouse.get_pos()\n if a>x and b>y and a<x+xsize and b<y+ysize:\n for event in events:\n if event.type == pygame.MOUSEBUTTONDOWN:\n self.clickedAction(events)\n self.clicked = True\n return self.clicked", "def _check_play_button(self, mouse_pos):\n button_clicked = self.play_button.rect.collidepoint(mouse_pos)\n if button_clicked and not self.stats.game_active:\n # Reset the game settings.\n self.settings.initialize_dynamic_settings()\n # Reset the game statistics.\n self.stats.reset_stats()\n self.stats.game_active = True\n self.sb.prep_score()\n self.sb.prep_level()\n self.sb.prep_ships()\n # Get rid of any remaining stars and bullets.\n self.stars.empty()\n self.bullets.empty()\n # Create a new galaxy and center the ship.\n self._create_galaxy()\n self.ship.center_ship()\n pygame.mouse.set_visible(False)", "def _check_play_button(self, mouse_pos):\n # checking if button is 
clicked while there's no game active\n # else the button would be clickable even after turning invisible\n button_clicked = self.play_button.rect.collidepoint(mouse_pos)\n if button_clicked and not self.stats.game_active:\n # resets games speed\n self.settings.initialize_dynamic_settings()\n\n # reset stats / level / ships and changing game state\n self.stats.reset_stats()\n self.stats.game_active = True\n self.sb.prep_score()\n self.sb.prep_level()\n self.sb.prep_ships()\n\n # getting rid of alien ships and bullets\n self.aliens.empty()\n self.bullets.empty()\n\n # creating new fleet and centering players ship\n self._create_fleet()\n self.ship.center_ship()\n\n # making mouse pointer invisible\n pygame.mouse.set_visible(False)", "def _check_play_button(self, mouse_pos): \n button_clicked = self.play_button.rect.collidepoint(mouse_pos)\n if button_clicked and not self.stats.game_active:\n # Reset game settings\n self.settings.initialize_dynamic_settings()\n\n # Reset game stats\n self.stats.reset_stats()\n self.stats.game_active = True\n self.sb.prep_score()\n self.sb.prep_level()\n self.sb.prep_ships()\n\n # Remove any remaining aliends and bullets\n self.aliens.empty() \n self.bullets.empty()\n\n # Create new fleet and center the ship\n self._create_fleet()\n self.ship.center_ship() \n\n # Hide the mouse cursor when inside of game window\n pygame.mouse.set_visible(False)", "def on_click(self, event):\n if self.click_job is not None:\n self.after_cancel(self.click_job)\n item = self.identify(\"item\", event.x, event.y)\n if item:\n self.click_job = self.after(200, self.clicked, item)\n return \"break\"", "def cases_app_opening():\n run = True\n click = False\n clock = pygame.time.Clock()\n\n cooldown = 0\n\n # BUTTON INITIALIZATION\n B_INV_BACK = pygame.Rect(190, 205, 55, 55)\n B_INV_FORWARD = pygame.Rect(255, 205, 55, 55)\n B_TRIP_LABEL = pygame.Rect(110, 185, 280, 75)\n B_TRIP_CASE = pygame.Rect(155, 270, 190, 150)\n B_CLASS_LABEL = pygame.Rect(110, 445, 280, 75)\n B_CLASS_CASE = pygame.Rect(155, 530, 190, 150)\n B_BACK = pygame.Rect(20, 20, 60, 60)\n B_MENU = pygame.Rect(WIDTH - 65 - 20, 20, 60, 60)\n\n while run:\n pos_x, pos_y = pygame.mouse.get_pos()\n user_settings = return_user_settings()\n BG = (\n BG_CASEKY_VOLBA_L\n if user_settings[\"theme\"] == \"light\"\n else BG_CASEKY_VOLBA_D\n )\n FONT_COLOR = FONT_COLOR_L if user_settings[\"theme\"] == \"light\" else FONT_COLOR_D\n WIN.blit(BG, (0, 0))\n WIN.blit(BACK, (20, 20))\n WIN.blit(MENU, (WIDTH - 65 - 20, 20))\n\n # LABELS\n label_trip_col = MAIN_FONT.render(\"Trip Collection\", 1, FONT_COLOR)\n WIN.blit(label_trip_col, (WIDTH_H - 75, 205))\n label_class_col = MAIN_FONT.render(\"Class Collection\", 1, FONT_COLOR)\n WIN.blit(label_class_col, (WIDTH_H - 75, 465))\n\n # Zistovanie, ci nebolo kliknute na textove pole\n if click and cooldown == 0:\n if B_BACK.collidepoint(pos_x, pos_y):\n run = False\n elif B_MENU.collidepoint(pos_x, pos_y):\n settings_menu()\n elif B_TRIP_CASE.collidepoint(pos_x, pos_y) or B_TRIP_LABEL.collidepoint(\n pos_x, pos_y\n ):\n case_opening_app(case_type=\"trip\")\n elif B_CLASS_CASE.collidepoint(pos_x, pos_y) or B_CLASS_LABEL.collidepoint(\n pos_x, pos_y\n ):\n case_opening_app(case_type=\"class\")\n cooldown = FPS // 3\n\n # Event handling\n events = pygame.event.get()\n for event in events:\n if event.type == pygame.QUIT:\n run = False\n if event.type == pygame.MOUSEBUTTONDOWN:\n if event.button == 1:\n click = True\n if event.type == pygame.MOUSEBUTTONUP:\n if event.button == 1:\n click = False\n if 
cooldown != 0:\n cooldown -= 1\n\n pygame.display.update()\n clock.tick(FPS)", "def on_step_clicked(self):\n self.start_threading()\n self.stepping = True\n self.step_event.set()", "def click(self, x, y, button, press):\n\n if self.is_in_screen(x, y) and not self.pause:\n self.get_color(x, y)\n self.record(x, y, button, press)", "def on_click(self):\n arcade.play_sound(button, volume=constants.MUSIC_VOLUME / 40)\n\n global success\n global fails\n if success or fails == 20:\n reset_global_variables()\n self.minigame.window.show_view(self.minigame.main_view)\n else:\n self.minigame.window.show_view(self.minigame.main_view)\n print(f\"Exit Button.\")", "def __on_click(self):\n if self.enable:\n self.__function_to_activate()", "def start_playing(self, mouse):\n self.start = True", "def double_clicked(mouse):\n global state, current_action\n\n smallest_element = get_element(mouse)\n\n with data_lock:\n if smallest_element:\n state = 0\n current_action = wtl.actions.Click(wtl.Selector(f'[wtl-uid=\"{smallest_element.wtl_uid}\"]'))", "def check_replay_button(self, mouse_x, mouse_y):\r\n for button in self._replay_button_list:\r\n if button.get_button_rect().collidepoint(mouse_x, mouse_y):\r\n button_clicked = button\r\n break\r\n else:\r\n button_clicked = None\r\n\r\n if button_clicked is not None and button_clicked.get_num_atom() == 1:\r\n self.setup_new_game()\r\n elif button_clicked is not None and button_clicked.get_num_atom() == 2:\r\n sys.exit()", "def click(self, event):\n if self.segs == []:\n startCircle = self.findInter(event.x, event.y)\n if startCircle:\n xa, ya, xb, yb = self.can.coords(startCircle)\n self.firstCoords = ((xa + xb)/2, (ya + yb)/2)\n if not self.helpShown:\n self.showHelp()", "def isPieceClicked(self):\r\n if self.clickedPiece is None:\r\n return False\r\n return True", "def click_callback(self, event):\n # print(\"clicked at \", event.x+self.offset_x, event.y+self.offset_y)\n # x = string.ascii_lowercase[math.ceil((event.x + self.offset_x) / self.width) - 1]\n # y = (math.ceil((event.y + self.offset_y) / self.width) - 9) * -1\n self.clear_moves_on_canvas()\n\n x = math.ceil((event.x + self.offset_x) / self.width) - 1\n y = math.ceil((event.y + self.offset_y) / self.width) - 1\n\n if 0 <= x < 8 and 0 <= y < 8:\n board_value = self.game.board[x][y]\n if self.moving:\n # check if second click isn't on another piece\n if board_value != \"\" and board_value[0] == self.game.current_player_color:\n self.calculate_moves_for_moving_piece(x, y)\n else:\n self.move_piece(x, y) # method moves moving_piece\n self.moving = False\n else:\n self.calculate_moves_for_moving_piece(x, y) # method sets moving_piece", "def on_click(self, x, y):\n mul_x, mul_y = self.multiplier\n off_x, off_y = self.offset\n x -= off_x\n x /= mul_x\n y -= off_y\n y /= mul_y\n for button in self.button_dict.values():\n button.check_click(x, y)", "def on_click(self) -> None:\n pass", "def _click(self):\n if hasattr(self.canvas[\"items\"][self.index], 'commandFunc'):\n self.canvas[\"items\"][self.index].commandFunc(None)", "def _inactive(self):\n self._click()\n if self._last is None and self._touch is not None:\n self._state = STATE_COUNTDOWN\n self._game = Gameplay()\n self._last = self._touch", "def on_mouse_press(self, _x, _y, _button, _modifiers):\n game_view = GameView()\n #game_view.setup()\n game_view.setup(level=1)\n\n #self.music_gameover.stop(self.player_music_gameover)\n\n\n try:\n self.music_gameover.stop(self.player_music_gameover)\n except ValueError:\n print(\"music already finished\")\n\n 
self.window.show_view(game_view)", "def _click(self):\n self._touch = self.view.touch", "def click(event):\r\n global score, targets_left, have_friend_param\r\n flag = 0\r\n mult = event.button\r\n for i in range(num_of_balls + 1):\r\n if balls_pool[i][6] > 0 and (event.pos[0] - balls_pool[i][0]) ** 2 + (event.pos[1] - balls_pool[i][1]) ** 2 <= \\\r\n balls_pool[i][2] ** 2:\r\n if i == 0:\r\n score += mult * max_rad * (1 + have_friend_param)\r\n screen.fill(YELLOW)\r\n else:\r\n score += mult * (max_rad + min_rad - balls_pool[i][2]) * (1 + have_friend_param)\r\n balls_pool[i][6] -= 1 * mult\r\n if balls_pool[i][6] <= 0:\r\n targets_left -= 1\r\n flag = 1\r\n\r\n if not flag:\r\n score -= mult * (max_rad + min_rad) // 10", "def run(self):\n self.soundtrack.play(-1, 0, 2000)\n pygame.time.set_timer(Game.REFRESH_EVENT, 1000 // Game.FPS)\n\n while 1 < 2:\n event = pygame.event.wait()\n\n # Android-specific: always be ready to sleep\n if android:\n if android.check_pause():\n android.wait_for_resume()\n\n # Refresh display\n if event.type == Game.REFRESH_EVENT:\n # Android-specific: keep the soundtrack playing\n if android:\n android.mixer.periodic()\n\n self.draw()\n self.physics()\n pygame.display.flip()\n\n # The announcement is over---start playing\n elif event.type == Game.ANNOUNCE_EVENT:\n pygame.time.set_timer(Game.ANNOUNCE_EVENT, 0)\n self.state = Game.PLAY_STATE\n\n # The congratulations is over---announce new target\n elif event.type == Game.BRAVO_EVENT:\n pygame.time.set_timer(Game.BRAVO_EVENT, 0)\n self.announce_target()\n\n # The user clicked somewhere\n elif event.type == pygame.MOUSEBUTTONDOWN \\\n and self.state != Game.BRAVO_STATE:\n self.clicked(event.pos)\n\n # The user hit escape (or back); quit\n elif event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:\n break", "def click(self, mouse_pos):\n for button in self.enabled_buttons(): # type: Button\n if button.is_position_on_button(mouse_pos):\n self.sound.play_sound(self.click_sound)\n button.click()", "def check_click(self, mouse_x, mouse_y):\r\n # Change the x/y screen coordinates to grid coordinates\r\n column = mouse_x // 70\r\n row = mouse_y // 70\r\n\r\n if row in [0, 9] or column in [0, 9]:\r\n self.shoot_ray(row, column)\r\n elif 0 < row < 9 and 0 < column < 9:\r\n self.guess_atom(row, column)", "def click(self, mousepos):\n if self.currentplayer == 1:\n if isinstance(self.player1, Computer):\n return\n else:\n self.player1.move = self.board.findClickedBox(mousepos)\n if self.currentplayer == -1:\n if isinstance(self.player2, Computer):\n return\n else:\n self.player2.move = self.board.findClickedBox(mousepos)", "def click(self, event):\n x = self.ptgrid(event.x)\n y = self.ptgrid(event.y)\n \n # x = loc[0]\n # y = loc[1]\n\n # if self.gamestate == self.STATE_TITLE_SCREEN:\n # self.new_board()\n # self.gamestate = FIRST_PLAYER\n\n\n #duplication /!\\\n if (self.board[y][x] == self.EMPTY and self.p2pGame.isReady):\n if(self.p2pGame.playerTurn == 'X' and self.player == 1):\n self.new_move(x, y, self.player)\n\n if self.has_won(self.player):\n self.gamestate = self.STATE_GAME_OVER\n if self.player == 1:\n self.gameover_screen('X Gagne')\n data = \"--W:X\"\n else:\n self.gameover_screen('O Gagne')\n data = \"--W:O\"\n self.p2pGame.playerTurn = 'X'\n self.p2pGame.sendTicTacToeData(text=data)\n\n\n elif self.is_a_draw():\n self.gamestate = self.STATE_GAME_OVER\n self.gameover_screen('Egalité')\n data = \"--D\"\n self.p2pGame.playerTurn = 'X'\n self.p2pGame.sendTicTacToeData(text=data)\n\n else:\n data = \"--X:\"+ 
str(x) + \":\" + str(y)\n self.p2pGame.playerTurn = 'O'\n self.p2pGame.sendTicTacToeData(text=data)\n # self.gamestate = self.STATE_O_TURN\n #self.launch()\n elif(self.p2pGame.playerTurn == 'O' and self.player == 2):\n self.new_move(x, y, self.player)\n\n if self.has_won(self.player):\n self.gamestate = self.STATE_GAME_OVER\n if self.player == 1:\n self.gameover_screen('X Gagne')\n data = \"--W:X\"\n else:\n self.gameover_screen('O Gagne')\n data = \"--W:O\"\n self.p2pGame.playerTurn = 'X'\n self.p2pGame.sendTicTacToeData(text=data)\n\n\n elif self.is_a_draw():\n self.gamestate = self.STATE_GAME_OVER\n self.gameover_screen('Egalité')\n data = \"--D\"\n self.p2pGame.playerTurn = 'X'\n self.p2pGame.sendTicTacToeData(text=data)\n\n else:\n data = \"--O:\"+ str(x) + \":\" + str(y)\n self.p2pGame.playerTurn = 'X'\n self.p2pGame.sendTicTacToeData(text=data)\n # self.gamestate = self.STATE_O_TURN\n #self.launch()\n elif self.gamestate == self.STATE_GAME_OVER:\n #reset\n self.new_board()\n self.gamestate = self.FIRST_PLAYER\n self.p2pGame.sendPlayAgain(\"--A\")", "def try_click(self, mouse_y: int, mouse_x: int) -> bool:\n if self.is_intersecting(mouse_y, mouse_x):\n self.on_click()\n return True\n return False", "def check_play_button(si_settings,screen,stats,sb,play_button,ship,aliens,bullets,mouse_x,mouse_y):\n button_clicked = play_button.rect.collidepoint(mouse_x,mouse_y)\n if button_clicked and not stats.game_active:\n #Hides mouse\n pygame.mouse.set_visible(False)\n #reset stats\n si_settings.initalize_dynamic_settings()\n stats.reset_stats()\n stats.game_active = True\n #reset Scoreboard\n sb.prep_score()\n sb.prep_high_score()\n sb.prep_level()\n sb.prep_ships()\n #Empty aliens and bullets\n aliens.empty()\n bullets.empty()\n #creates new fleet and centers ship\n create_fleet(si_settings,screen,ship,aliens)\n ship.center_ship()", "def process_events(self):\n\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n return True\n if event.type == pygame.MOUSEBUTTONDOWN:\n if self.game_over:\n self.__init__()\n\n return False", "def on_left_click(self, client, game) -> None:\n pass", "def handle_right_click(self):\n if not self.game_in_progress:\n return\n if self.first_click:\n self.first_click = False\n self.timer.start(1000)\n sender = self.sender()\n row = 0\n col = 0\n for row in range(self.rows):\n for col in range(self.cols):\n if self.button_array[row][col] == sender:\n break\n else:\n continue\n break\n # print 'Received right click:', row, ',', col\n status = self.board.getcellstatus(row, col)\n if status == CellStatus.Opened:\n return\n elif status == CellStatus.Closed:\n self.remainingminecount = self.remainingminecount - 1\n self.mines_lcd.display(str(self.remainingminecount))\n self.board.setcellstatus(row, col, CellStatus.MarkedAsMine)\n self.button_array[row][col].setIcon(QtGui.QIcon(\"icons/Flag.png\"))\n elif status == CellStatus.MarkedAsMine:\n self.remainingminecount = self.remainingminecount + 1\n self.mines_lcd.display(str(self.remainingminecount))\n self.board.setcellstatus(row, col, CellStatus.MarkedAsSuspectedMine)\n self.button_array[row][col].setIcon(QtGui.QIcon(\"icons/questionmark.png\"))\n elif status == CellStatus.MarkedAsSuspectedMine:\n self.board.setcellstatus(row, col, CellStatus.Closed)\n self.button_array[row][col].setIcon(QtGui.QIcon(\"icons/unopenedsquare.png\"))", "def clickDone(self):\n\n # Hide done button\n self.clickDone.place_forget()\n\n # Hide all ships and their names\n self.canvas.tag_lower('ship')\n self.canvas.tag_lower('text')\n 
self.canvas.tag_bind('square', '<Button-1>', self.fire)\n self.players.updateWidget()\n\n # If opponent is computer, unbind left-click trigger\n # This prevents user from left-clicking\n if game2.isComputer == 1:\n self.canvas.tag_unbind('square', '<Button-1>')\n self.players.frame1.title(\"%s's turn\" % self.players.usernames[1])\n self.players.frame2.title(\"%s's turn\" % self.players.usernames[0])\n showDialogBox(\"%s's turn first\" % self.players.usernames[0])", "def _check_play_button(self, mouse_pos):\n\t\tbutton_clicked = self.play_button.rect.collidepoint(mouse_pos)\n\t\tif button_clicked and not self.stats.game_active:\n\t\t\t# Reset the game settings.\n\t\t\tself.settings.initialize_dynamic_settings()\n\t\t\tself.stats.reset_stats()\n\t\t\tself.stats.game_active = True\n\t\t\tself.sb.prep_score()\n\t\t\tself.sb.prep_pigeons()\n\t\t\t# Hide the cursor.\n\t\t\tpygame.mouse.set_visible(False)\n\n\t\t# Get rid of any remaining autos and droppings.\n\t\tself.autos.empty()\n\t\tself.droppings.empty()\n\n\t\t# Create a new fleet and center the pigeon\n\t\tself._create_fleet()\n\t\tself.pigeon.center_pigeon()", "def again(self):\n pygame.display.update()\n clock.tick(15)\n while True:\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n pygame.quit()\n quit()\n if event.type == pygame.MOUSEBUTTONDOWN:\n pygame.event.post(event)\n\n background()\n position = ((width / 2), (height / 3))\n text_display(\"Wygrana!!!\", 100, black, ((width / 2), (height / 5)))\n text_display(\"Czy chcesz zagrać ponownie?\", 70, black, position)\n mouse = pygame.mouse.get_pos()\n button_yes = Buttton(350, 250, 100, 50, green, \"tak\", 30)\n button_yes.show()\n button_yes.backlight(mouse)\n button_no = Buttton(350, 350, 100, 50, green, \"nie\", 30)\n button_no.show()\n button_no.backlight(mouse)\n\n if button_yes.is_clicked(mouse):\n self.board = Board()\n self.choose_level()\n\n pygame.display.update()\n clock.tick(15)\n\n if button_no.is_clicked(mouse):\n pygame.quit()\n quit()", "def on_press(self):\n self.pressed = True", "def on_press(self):\n self.pressed = True", "def _check_play_button(self, mouse_pos):\n if self.play_button.rect.collidepoint(mouse_pos) and not self.stats.game_active:\n self.stats.reset_stats()\n self.settings.initialize_dynamic_settings()\n self.stats.game_active = True\n\n #Hide mouse cursor\n pygame.mouse.set_visible(False)\n\n # Get rid of any leftover aliens and bullets\n self.aliens.empty()\n self.bullets.empty()\n\n #Create a new fleet and center the ship.\n self._create_fleet()\n self.ship.center_ship()\n\n self.scoreboard.prep_score()\n self.scoreboard.prep_high_score()\n self.scoreboard.prep_ships()", "def run_button(self):\r\n self.step = False # Clear step command\r\n self.is_pause = False\r\n self.run_command()", "def onMouseLeftDown(self, event):\n # [NOTE] No need to call self.choice(). 
It is enough to call\n # event.Skip() and the machine will be called self.OnButtonClick()\n event.Skip()", "def control(self):\n while not (self.game_over() or self.quit):\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n self.quit = True\n elif event.type == pygame.KEYDOWN:\n if event.key == pygame.K_r:\n self.play()\n elif event.key == pygame.K_m:\n self.__init__()\n elif event.key == pygame.K_LEFT and len(self.sequence)>=2:\n self.sequence.pop()\n self.board = self.sequence.pop()\n self.draw()\n elif event.key == pygame.K_1:\n self.tip(1)\n elif event.key == pygame.K_2:\n self.tip(2)\n elif event.key == pygame.K_3:\n self.tip(3)\n elif event.key == pygame.K_4:\n self.tip(4)\n elif event.key == pygame.K_5:\n self.tip(5)\n \n elif event.type == pygame.MOUSEBUTTONDOWN and event.button == 1:\n ## if mouse is pressed get position of cursor ##\n pos = pygame.mouse.get_pos()\n ## check if cursor is on button ##\n for i in range(len(self.buttons)):\n for j in range(len(self.buttons[i])):\n if self.buttons[i][j].collidepoint(pos):\n if self.selected == None:\n self.selected = [i,j]\n elif self.selected == [i,j]:\n self.selected = None\n elif self.board[self.selected[0]][self.selected[1]]==0:\n self.selected = [i,j]\n else:\n if self.move(i,j):\n self.selected = None\n self.draw()\n return True\n else:\n self.selected = None\n self.draw()\n return False\n self.draw()\n return False", "def startButtonPressed(self):\n self.showtime = time.time()\n\n self.save_file = open(os.path.join(args.parent_img_path, self.save_file_name), \"a\")\n self.save_file.write(\"\\nStart Button Pressed\\n\")\n self.save_file.close()\n\n # Timers for x axis scrolling.\n self.tmr = QElapsedTimer()\n self.tmr.start()\n print(\"Start button has been pressed!\")\n\n self.startButton.setEnabled(False)\n self.stopButton.setEnabled(True)\n self.reset.setEnabled(False)\n if self.showCTWM:\n self.setPointsCTWM.start() # starts the algorithm\n if self.showWHM:\n time.sleep(0.1)\n self.setPointsWHM.start()", "def _check_button(self, mouse_pos):\r\n if self.display.easy_button.rect.collidepoint(mouse_pos):\r\n self.settings.set_difficulty(self.settings.easy)\r\n self.ai_game.start_game()\r\n elif self.display.normal_button.rect.collidepoint(mouse_pos):\r\n self.settings.set_difficulty(self.settings.normal)\r\n self.ai_game.start_game()\r\n elif self.display.hard_button.rect.collidepoint(mouse_pos):\r\n self.settings.set_difficulty(self.settings.hard)\r\n self.ai_game.start_game()\r\n elif self.display.quit_button.rect.collidepoint(mouse_pos):\r\n self.ai_game.quit()", "def _playAgain(self):\n self._click()\n if self._last is None and self._touch is not None:\n self._state = STATE_RESET", "def start_game(self):\n print(\"hi there, game started!\")\n self.draw()", "def on_mouse_press(self, _x, _y, _button, _modifiers):\n game_view = MainGame()\n game_view.setup()\n self.window.show_view(game_view)", "def on_mouse_press(self, _x, _y, _button, _modifiers):\n game_view = MainGame()\n game_view.setup()\n self.window.show_view(game_view)", "def on_mouse_press(self, _x, _y, _button, _modifiers):\n game_view = MainGame()\n game_view.setup()\n self.window.show_view(game_view)", "def on_mouse_press(self, _x, _y, _button, _modifiers):\n game_view = MainGame()\n game_view.setup()\n self.window.show_view(game_view)", "def click(self, mouse_pos: Tuple[int, int]):\n self.clicked = self.img_rect and self.img_rect.collidepoint(\n mouse_pos) and not self.clicked\n return self.clicked", "def on_mouse_press(self, x, y, button):\n\n 
pass", "def game_play(self):", "def clickedAction(self, events):\n print(\"The {} button was clicked!\".format(self.imgname))", "def mouse_click(self,event):\n global drag_sq\n# print \"click at {0} {1}\".format(event.x,event.y)\n# sq = (event.y // sq_size) * 8 + event.x // sq_size\n sq = self.coord_to_sq((event.x, event.y))\n if sq in self.piece_objs:\n drag_sq = sq\n self.canvas.tag_raise(self.piece_objs[sq])\n return", "def mousePressed(): \n if not game_controller.game_over:\n # Creatr new disk only when there's no disk\n # or there's a disk that stopped falling\n if (not game_controller.falling_disk) or \\\n (game_controller.falling_disk and \\\n game_controller.falling_disk.y_vel == 0):\n game_controller.handle_mousePressed(mouseX, mouseY)", "def mouse_click(self,x,y,button,double_click):\n raise NotImplementedError(\"ERROR: Unimplemented function.\")", "def on_next_turn_click(self, button):\n if self.referee.is_game_over():\n Gtk.main_quit()\n else:\n self.do_next_turn(button)\n # if the game is over after this turn, we will shutdown on the next click,\n # so visually alert the player with the button label\n if self.referee.is_game_over():\n button.set_label(GAME_OVER_MSG)", "def on_left_mouse_click(self, event: Event) -> None:\n\t\tself.mouse_state.set_click(event.x, event.y)", "def set_click(self, x: int, y: int) -> None:\n\t\tself.state = MouseState.CLICKED\n\t\tself.click_position = Point(x, y)", "def click(self, event):\n try:\n x_loc, y_loc = self.appWindow.spec_cv.mouse(event)\n trackNo, updated_track = self.model.updateTrackClick(x_loc, y_loc,\\\n self.x_high)\n self.appWindow.spec_cv.updateTrack(trackNo, updated_track)\n self.appWindow.spec_cv.redrawTracks()\n self.locked_track = trackNo\n except TypeError:\n pass", "def _press(self, event):\n # make the drawn box/line visible get the click-coordinates,\n # button, ...\n if self._interactive and self._selection_artist.get_visible():\n self._set_active_handle(event)\n else:\n self._active_handle = None\n\n if ((self._active_handle is None or not self._interactive) and\n self._allow_creation):\n # Clear previous rectangle before drawing new rectangle.\n self.update()\n\n if (self._active_handle is None and not self.ignore_event_outside and\n self._allow_creation):\n x = event.xdata\n y = event.ydata\n self._visible = False\n self.extents = x, x, y, y\n self._visible = True\n else:\n self.set_visible(True)\n\n self._extents_on_press = self.extents\n self._rotation_on_press = self._rotation\n self._set_aspect_ratio_correction()\n\n return False", "def wait_for_click():\r\n global _canvas\r\n global _cue\r\n if _canvas == None:\r\n raise RuntimeError(\"Canvas is not open yet.\")\r\n else:\r\n while True:\r\n _cue = _canvas.wait()\r\n if _cue.getDescription() == 'mouse release': break", "def on_click(button):\n global ttt, choices, count, sym, result, x_pos, o_pos\n\n if count % 2 == 0:\n sym = \"X\"\n else:\n sym = \"O\"\n count += 1\n\n button.config(\n text=sym,\n state='disabled',\n disabledforeground=\"red\") # For cross\n\n x, y = get_coordinates(button)\n x += 1\n y += 1\n x_pos.append((x, y))\n state = gen_state(to_move='O', x_positions=x_pos,\n o_positions=o_pos)\n try:\n choice = choices.get()\n if \"Random\" in choice:\n a, b = random_player(ttt, state)\n elif \"Pro\" in choice:\n a, b = minimax_decision(state, ttt)\n else:\n a, b = alphabeta_player(ttt, state)\n except (ValueError, IndexError, TypeError) as e:\n disable_game()\n result.set(\"It's a draw :|\")\n return\n if 1 <= a <= 3 and 1 <= b <= 3:\n 
o_pos.append((a, b))\n button_to_change = get_button(a - 1, b - 1)\n if count % 2 == 0: # Used again, will become handy when user is given the choice of turn.\n sym = \"X\"\n else:\n sym = \"O\"\n count += 1\n\n if check_victory(button):\n result.set(\"You win :)\")\n disable_game()\n else:\n button_to_change.config(text=sym, state='disabled',\n disabledforeground=\"black\")\n if check_victory(button_to_change):\n result.set(\"You lose :(\")\n disable_game()", "def check_event(self, event):\r\n if event.type == pygame.MOUSEBUTTONDOWN:\r\n if self.selected:\r\n for item in self.buttons:\r\n item.handleMouseDown(event.pos[0], event.pos[1])\r\n else:\r\n self.tab.handleMouseDown(event.pos[0], event.pos[1])", "def pressTile(self, event):\n clickedTile = event.widget\n if clickedTile.isInPlay(): self.changeSmile(2)\n if not clickedTile.isFlagged():\n clickedTile.buttonPress()\n if not self.minesArmed and event.num == 1:\n self.setUpBombs(event)", "def simulate_button_clicked(self):\n self.simulate_bool = True\n self.update_change()", "def on_mouse_press(self, _x, _y, _button, _modifiers):\n game_view = GameView()\n game_view.setup(level=1)\n arcade.set_background_color(arcade.csscolor.BLACK)\n\n\n try:\n self.music_intro.stop(self.player_music_intro)\n except ValueError:\n print(\"music already finished\") # ValueError: list.remove(x): x not in list media.Source._players.remove(player)\n\n self.window.show_view(game_view)", "def click(self, agent):\n self.grab(agent)\n #eventlet.sleep(5)\n self.degrab(agent)", "def unpress(self):\n if self.unclick:\n self.clicked = False", "def play(self):\n self.accept(\"wheel_up\", self.scrollindex, [-1] )\n self.accept(\"wheel_down\", self.scrollindex, [1] )\n self.accept(\"arrow_up\", self.scrollindex, [-1] )\n self.accept(\"arrow_down\", self.scrollindex, [1] )\n self.accept(\"enter\", self._click)\n if callable(self.data['exit']): self.accept(\"escape\", self.data['exit'])\n for item in self.canvas[\"items\"]: item['state']=DGG.NORMAL", "def start_game(cur_button):\r\n\r\n global time, game_on, wrd_lst\r\n initialize()\r\n time = 180\r\n game_on = True\r\n cmd_refresh()\r\n window.after(1000, minus_time(cur_button))\r\n cur_button.configure(text='stop')", "def normal_run(self):\n super().events_buttons(back=True)\n self.events_delete_btns()\n self.draw()", "def events(self):\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n self.running = False\n self.sleep_time = 0\n return\n\n if event.type == pygame.MOUSEBUTTONUP:\n pos = pygame.mouse.get_pos()\n\n if self.button.collidepoint(pos):\n if self.state == \"solving\":\n self.state = \"stopping\"\n\n if self.state == \"solved\":\n self.state = \"waiting\"\n self.puzzle_state = \"solving\"\n self.button_text = \"Solve!\"\n self.board = self.original_board.copy()\n\n elif self.state == \"waiting\":\n self.state = \"solving\"\n self.button_text = \"Stop!\"\n self.button_color = BUTTON_COLOR_STOP\n\n isSolved = self.solve()\n\n self.button_color = BUTTON_COLOR_SOLVE\n if isSolved:\n self.state = \"solved\"\n self.button_text = \"Clear\"\n self.puzzle_state = \"solved\"\n else:\n if self.state == \"stopping\":\n self.state = \"waiting\"\n self.button_text = \"Solve!\"\n self.puzzle_state = \"solving\"\n else:\n self.state = \"solved\"\n self.button_text = \"Clear\"\n self.puzzle_state = \"failed\"" ]
[ "0.7497747", "0.7181813", "0.698531", "0.6901437", "0.6851919", "0.68466634", "0.6759625", "0.65531003", "0.65473086", "0.6535983", "0.6518915", "0.6467115", "0.64507085", "0.64456236", "0.6402903", "0.6379502", "0.63639313", "0.6360009", "0.63344973", "0.6327313", "0.6322559", "0.62664294", "0.62416464", "0.6235618", "0.6234628", "0.62339586", "0.6231782", "0.6226885", "0.6192374", "0.6187437", "0.61839366", "0.6164915", "0.6129258", "0.6128474", "0.61151165", "0.60971594", "0.60948956", "0.60860467", "0.6067218", "0.6058827", "0.60559386", "0.60526353", "0.6050422", "0.60473", "0.6046245", "0.6045652", "0.60446346", "0.6014226", "0.60137427", "0.5996446", "0.5995639", "0.5993707", "0.59759486", "0.59754723", "0.59683627", "0.59597075", "0.59580415", "0.5952779", "0.5938509", "0.59365827", "0.5930544", "0.5928116", "0.5906468", "0.5906468", "0.5903088", "0.58861303", "0.58791864", "0.5875304", "0.58644974", "0.5861237", "0.58604974", "0.58457214", "0.58416563", "0.58416563", "0.58416563", "0.58416563", "0.58344555", "0.5833993", "0.58285296", "0.58244014", "0.58210355", "0.5810459", "0.5809953", "0.5802353", "0.5796652", "0.57958335", "0.5793365", "0.578311", "0.57790375", "0.5777017", "0.577419", "0.57680094", "0.57578975", "0.5757842", "0.57571465", "0.57490355", "0.5748678", "0.5744791", "0.5735798", "0.5726894" ]
0.5729173
99
Generate a document containing the available variable types.
def generate_type_hierarchy(ctx):
    ctx.run("./env/bin/python -m puresnmp.types > doc/typetree.rst")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def document_types(db: Session = Depends(get_db)):\n return get_document_types(db)", "def _variable_types(self):\n return self._variable_single_types + self._variable_array_types", "def doc_types(self):\n return self._extract_set('doc_type')", "def genotype(self):\n\t\tgenotype = \"\"\n\t\tfields = vars(self)\n\t\tfor name, field in fields.items():\n\t\t\tif isinstance(field, Pattern):\n\t\t\t\tgenotype += field.genotype()\n\t\t\telse:\n\t\t\t\tgenotype += str(field)\n\t\t\tgenotype += \"\\0\"\n\n\t\treturn genotype", "def data_types():\n\n return ...", "def variable_types(self, data_key, only_type=None):\r\n if self[data_key].meta['columns'] is None:\r\n return 'No meta attached to data_key: %s' %(data_key)\r\n else:\r\n types = {\r\n 'int': [],\r\n 'float': [],\r\n 'single': [],\r\n 'delimited set': [],\r\n 'string': [],\r\n 'date': [],\r\n 'time': [],\r\n 'array': []\r\n }\r\n not_found = []\r\n for col in self[data_key].data.columns:\r\n if not col in ['@1', 'id_L1', 'id_L1.1']: \r\n try:\r\n types[\r\n self[data_key].meta['columns'][col]['type']\r\n ].append(col)\r\n except:\r\n not_found.append(col) \r\n for mask in self[data_key].meta['masks'].keys():\r\n types[self[data_key].meta['masks'][mask]['type']].append(mask)\r\n if not_found:\r\n print '%s not found in meta file. Ignored.' %(not_found)\r\n if only_type:\r\n return types[only_type]\r\n else:\r\n return types", "def TYPES():\n\n if config.types_cache:\n return config.types_cache\n\n types = {\n 'actionscript': '.as .mxml',\n 'asm': '.asm .s',\n 'batch': '.bat .cmd',\n #'binary': 'Binary files, as defined by Perl's -B op (default: off)',\n 'cc': '.c .h .xs',\n 'cfmx': '.cfc .cfm .cfml',\n 'cpp': '.cpp .cc .cxx .m .hpp .hh .h .hxx',\n 'csharp': '.cs',\n 'css': '.css',\n 'elisp': '.el',\n 'erlang': '.erl',\n 'fortran': '.f .f77 .f90 .f95 .f03 .for .ftn .fpp',\n 'haskell': '.hs .lhs',\n 'hh': '.h',\n 'html': '.htm .html .shtml .xhtml',\n 'java': '.java .properties',\n 'js': '.js',\n 'jsp': '.jsp .jspx .jhtm .jhtml',\n 'lisp': '.lisp .lsp',\n 'lua': '.lua',\n 'make': 'Makefiles',\n 'mason': '.mas .mhtml .mpl .mtxt',\n 'objc': '.m .h',\n 'objcpp': '.mm .h',\n 'ocaml': '.ml .mli',\n 'parrot': '.pir .pasm .pmc .ops .pod .pg .tg',\n 'perl': '.pl .pm .pod .t',\n 'php': '.php .phpt .php3 .php4 .php5',\n 'plone': '.pt .cpt .metadata .cpy',\n 'python': '.py',\n 'ruby': '.rb .rhtml .rjs .rxml .erb',\n 'scheme': '.scm',\n 'shell': '.sh .bash .csh .tcsh .ksh .zsh',\n 'smalltalk': '.st',\n 'sql': '.sql .ctl',\n 'tcl': '.tcl .itcl .itk',\n 'tex': '.tex .cls .sty',\n 'tt': '.tt .tt2 .ttml',\n 'vb': '.bas .cls .frm .ctl .vb .resx',\n 'vim': '.vim',\n 'xml': '.xml .dtd .xslt .ent',\n 'yaml': '.yaml .yml',\n }\n\n items = {}\n for ftype, ext_list in types.iteritems():\n items[ftype] = ext_list.split()\n\n config.types_cache = items\n return items", "def injectTypes (g):\n\tself=__module__\n\ts=g.symbols\n\tg.token('TYPE_VAR', '_|[A-Z][A-Z0-9]*')\n\tg.rule('TypeParameter', s.LSB, listOf(g.agroup(s.TYPE_VAR, s.FQNAME), s.COMMA, g), s.RSB)\n\tg.rule('TypeReference', s.FQNAME._as('name'), s.TypeParameter.optional()._as('parameters'))\n\tg.group('TypeValue')\n\tg.rule('TypeExpression')\n\tg.rule('TypeUnionSuffix', s.PIPE, s.TypeValue)\n\tg.group('TypePrefix', s.TypeReference)\n\tg.group('TypeSuffix', s.TypeUnionSuffix)\n\tg.rule('TypeExpression', s.TypePrefix, s.TypeSuffix.zeroOrMore())\n\tg.rule('TypeParens', s.LP, listOf(s.TypeExpression, s.COMMA, g), s.RP)\n\ts.TypeValue.set(s.TypeParens, s.TypeExpression)\n\tg.rule('TypeSlot', s.CheckIndent, 
g.aword('@slot'), s.NAME._as('name'), g.arule(s.COLON, s.TypeValue).optional()._as('value'), s.EOL, s.Documentation.optional()._as('documentation'))\n\tg.group('TypeLine', s.TypeSlot)\n\tg.group('TypeCode', s.COMMENT, s.TypeLine)\n\tg.rule('TypeBody', s.Indent, s.TypeCode.zeroOrMore(), s.Dedent)\n\tg.rule('Type', s.CheckIndent, g.aword('@type'), s.TypeReference._as('name'), g.arule(s.COLON, s.TypeValue).optional()._as('value'), s.EOL, s.Documentation.optional()._as('documentation'), s.TypeBody.optional())", "def ntypes(self): # -> list[str]:\n ...", "def type_skeleton():\n return {\"base_type\": None,\n \"values\": {\"names\": [], \"codes\": []}}", "def make_typedefs(self):\n type_dict = self.python_madz_types_dict + self.mangled_namespace\n res = \"{} = {{}}\\n\".format(type_dict)\n\n for node in self.description.declarations():\n varname = self.python_madz_types + self.mangled_namespace + \"___\" + node.name\n # Hack to get self referential top level structs.\n if (node.type.node_type() == pdl.TypeStruct):\n self._is_top_level = varname\n res += self.gen_type_string(node.type)\n res += \"\\n\"\n else:\n res += \"{} = {}\\n\".format(varname, self.gen_type_string(node.type))\n res += \"{}['{}'] = {}\\n\".format(type_dict, node.name, varname)\n return res", "def valid_doc_types():\r\n return \", \".join(DOC_PATHS.keys())", "def describeSchemaTypes(self):\n if not self.__isSchemaCached():\n self.__cacheSchema()\n queryFile = self.__cacheLocation + \"/SELECT TYPES.json\"\n selectTypesReply = json.load(open(queryFile))\n for result in selectTypesReply[\"results\"]:\n if float(result[\"number\"]) < 1.1: \n continue # TEMP - ignore under 1.1\n queryFile = self.__cacheLocation + \"/DESCRIBE TYPE \" + re.sub(r'\\.', '_', result[\"number\"]) + \".json\"\n if not os.path.isfile(queryFile):\n raise Exception(\"Expected Schema for %s to be in Cache but it wasn't - exiting\" % result[\"number\"])\n jreply = json.load(open(queryFile))\n if \"count\" in result:\n jreply[\"count\"] = result[\"count\"]\n yield jreply", "def _get_types(self):\n\n db = Database()\n self.c_built_ins = list(map(lambda tup: tup[0], db.select_built_types()))\n self.c_built_in_array_types = r'^(' + '|'.join(self.escaped(self.c_built_ins)) + ')\\[[0-9]*\\]'\n self.c_types = list(map(lambda tup: tup[0], db.select_types()))\n self.c_array_types = r'^(' + '|'.join(self.escaped(self.c_types)) + ')\\[[0-9]*\\]'\n db.close_connection()", "def ntypes(self): # -> None:\n ...", "def fortran_type_definition(self) -> str:\n result = ''\n public = ''\n if self.public:\n public = ', public'\n for val_name, val_value in self.values:\n result += 'integer, parameter{} :: {}_{}_{} = {}\\n'.format(\n public, self.f_prefix, self.name, val_name, val_value)\n result += ('integer, parameter{0} :: {1}_{2} = selected_int_kind(9)\\n\\n'\n ).format(public, self.f_prefix, self.name)\n return indent(result, 4*' ')", "def _variable_single_types(self):\n return [\n 'Binary',\n 'KeyValue',\n 'String',\n 'TCEntity',\n 'TCEnhancedEntity',\n ]", "def gen_dump_code(var_name: str, ty: type) -> str:\n\n tys = type_str(ty)\n if tys.startswith(\"Set[\"):\n return \"print(json.dumps({k : 1 for k in \" + var_name + \"})) # write sets as dictionaries\\n\"\n return f\"print(json.dumps({var_name}))\\n\"", "def docType():\n return (u'<?xml version=\"1.0\" encoding=\"UTF-8\"?>\\n'\n u'<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 '\n u'Transitional//EN\" '\n u'\"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">\\n')", "def document_type(self, key, value):\n _doc_type 
= self.get(\"document_type\", {})\n\n def doc_type_mapping(val):\n if val:\n return mapping(DOCUMENT_TYPE, val)\n\n for v in force_list(value):\n val_a = doc_type_mapping(clean_val(\"a\", v, str))\n val_b = doc_type_mapping(clean_val(\"b\", v, str))\n\n if not val_a and not val_b and not _doc_type:\n raise UnexpectedValue(subfield=\"a\")\n\n if val_a and val_b and (val_a != val_b != _doc_type):\n raise ManualImportRequired(\n subfield=\"a or b - \" \"inconsistent doc type\"\n )\n if val_a:\n if _doc_type and _doc_type != val_a:\n raise ManualImportRequired(\n subfield=\"a\" \"inconsistent doc type\"\n )\n _doc_type = val_a\n if val_b:\n if _doc_type and _doc_type != val_a:\n raise ManualImportRequired(\n subfield=\"b\" \"inconsistent doc type\"\n )\n _doc_type = val_b\n return _doc_type", "def fortran_type_definition(self) -> str:\n result = ''\n if self.public:\n result += f'public :: {self.f_prefix}_{self.name}\\n'\n\n result += f'type {self.f_prefix}_{self.name}\\n'\n for value in self.values:\n result += f' logical :: {value} = .false.\\n'\n result += '\\n'\n result += 'contains\\n'\n result += f' procedure :: to_int => {self.f_prefix}_{self.name}_to_int_\\n'\n result += 'end type\\n'\n return indent(result, 4*' ')", "def getTypesList():\n return Gw2Spidy._request('types')['results']", "def data_types(self):", "def list_available_document_types(cls):\n\n response = cls._client.get(\"automatedDocumentOptions/\")\n return from_api(response.json())", "def _doc_create(type, data):\n doc = dict(data)\n doc.update({'model_type': type})\n return doc", "def test_typedef00205m_type_def00205m1_p(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/ElemDecl/typeDef/typeDef00205m/typeDef00205m.xsd\",\n instance=\"sunData/ElemDecl/typeDef/typeDef00205m/typeDef00205m1_p.xml\",\n class_name=\"Root\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )", "def test_get_types(self):\n pass", "def readDefinedTypes(self):\n types = {}\n for m in re.finditer(\"TYPE (.*) = (.*);\", self.data):\n typename, typetype = m.groups() \n if typetype in self.types.keys():\n types[typename] = typetype\n \n return types", "def _types(cls):\n return {}", "def exportTypes( c ) :\n assert str(type(c)) == \"<type '_mysql.connection'>\"\n xml = \"\"\n cT = sqlQuery ( c, \"select * from CrisisKind;\" )\n oT = sqlQuery ( c, \"select * from OrganizationKind;\" )\n pT = sqlQuery ( c, \"select * from PersonKind;\" ) \n for i in cT:\n xml += openTagAtt (\"CrisisKind\", \"crisisKindIdent\", i[0])\n xml += openCloseTag (\"Name\", i[1])\n xml += openCloseTag (\"Description\", i[2])\n xml += closeTag (\"CrisisKind\") \n for i in oT:\n xml += openTagAtt (\"OrganizationKind\", \"organizationKindIdent\", i[0])\n xml += openCloseTag (\"Name\", i[1])\n xml += openCloseTag (\"Description\", i[2])\n xml += closeTag (\"OrganizationKind\")\n for i in pT:\n xml += openTagAtt (\"PersonKind\", \"personKindIdent\", i[0])\n xml += openCloseTag (\"Name\", i[1])\n xml += openCloseTag (\"Description\", i[2])\n xml += closeTag (\"PersonKind\")\n assert str ( type ( xml ) ) == \"<type 'str'>\"\n return xml", "def ntypes(self): # -> list[None]:\n ...", "def fortran_typedefs(self) -> str:\n result = ''\n public = ''\n if self.public is None:\n return result\n if self.public:\n public = ', public'\n for err_name, err_code in error_codes.items():\n result += ' integer, parameter{} :: {}_{} = {}\\n'.format(\n public, self.f_prefix, err_name, err_code)\n result += 
'\\n'\n\n for kind_name, kind_def in kinds.items():\n result += ' integer, parameter{} :: {}_{} = {}\\n'.format(\n public, self.f_prefix, kind_name, kind_def)\n result += '\\n'\n\n for member in self.members:\n result += member.fortran_type_definition()\n if self.public:\n result += member.fortran_public_declarations()\n\n return result", "def DocumentType(self, default='article'):\n return self.data.get('document_type', [{}])", "def type_list():\n for type_ in orm.DataFlagType.select():\n click.echo(type_.name)", "def _type(self) -> str:\n ...", "def gettypes(self):\n return [str(self.sd.xlate(t[0])) for t in self.sd.types]", "def getTypeString(self):\n return '_'.join(self.types)", "def readAggregatedSimpleTypes(self):\n types = {}\n # SETs\n for m in re.finditer(\"TYPE (\\w*) = SET (.*);\", self.data):\n typename, typetype = m.groups() \n types[typename] = 'SET ' + typetype\n \n # BAGs\n for m in re.finditer(\"TYPE (\\w*) = BAG (.*);\", self.data):\n typename, typetype = m.groups() \n types[typename] = 'BAG ' + typetype\n \n # LISTs\n for m in re.finditer(\"TYPE (\\w*) = LIST (.*);\", self.data):\n typename, typetype = m.groups() \n types[typename] = 'LIST ' + typetype\n \n # ARRAYs\n for m in re.finditer(\"TYPE (\\w*) = ARRAY (.*);\", self.data):\n typename, typetype = m.groups() \n types[typename] = 'ARRAY ' + typetype\n \n # STRING vectors\n for m in re.finditer(\"TYPE (\\w*) = STRING\\((.*);\", self.data):\n typename, typetype = m.groups() \n types[typename] = 'STRING(' + typetype\n \n return types", "def type_():\n pass", "def test_typedef00204m_type_def00204m1_p(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/ElemDecl/typeDef/typeDef00204m/typeDef00204m.xsd\",\n instance=\"sunData/ElemDecl/typeDef/typeDef00204m/typeDef00204m1_p.xml\",\n class_name=\"Root\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )", "def test_typedef00101m_type_def00101m1_p(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/ElemDecl/typeDef/typeDef00101m/typeDef00101m.xsd\",\n instance=\"sunData/ElemDecl/typeDef/typeDef00101m/typeDef00101m1_p.xml\",\n class_name=\"Answer\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )", "def test_typedef00201m_type_def00201m1_p(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/ElemDecl/typeDef/typeDef00201m/typeDef00201m.xsd\",\n instance=\"sunData/ElemDecl/typeDef/typeDef00201m/typeDef00201m1_p.xml\",\n class_name=\"Root\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )", "def display(self, type_get):\n data = self.build_data()\n for word_type, content in data.items():\n count_def = 1\n if type_get and self.word_type_dict[word_type] != type_get:\n continue\n pron = content[1]\n print(bcolors.BOLD + bcolors.YELLOW + \"%s /%s/ (%s)\" % (self.word, pron, self.word_type_dict[word_type])\n + bcolors.ENDC)\n for sense_dict in content[0]:\n type_def = self.get_type_of_def(sense_dict)\n if type_def:\n type_def = \" \" + type_def + \" \"\n print(\"%s.\" % str(count_def) +\n bcolors.ITALIC + bcolors.GREEN + \"%1s\" % type_def + bcolors.ENDC +\n \"%s\" % self.chunk_str(sense_dict['definitions'][0]))\n if 'examples' in sense_dict:\n self.display_examples(sense_dict['examples'])\n\n print(\"\\r\")\n\n if 'subsenses' in sense_dict:\n self.display_subsenses(sense_dict['subsenses'], 
count_def)\n\n print(\"\\r\")\n count_def += 1", "def test_typedef00403m_type_def00403m1_p(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/ElemDecl/typeDef/typeDef00403m/typeDef00403m.xsd\",\n instance=\"sunData/ElemDecl/typeDef/typeDef00403m/typeDef00403m1_p.xml\",\n class_name=\"Root\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )", "def readTypes(self):\r\n types = {}\r\n for m in re.finditer(\"TYPE (.*) = (.*);\", self.data):\r\n typename, typetype = m.groups() \r\n if typetype in self.SIMPLETYPES:\r\n types[typename] = typetype\r\n else:\r\n types[typename] = \"#\" + typetype\r\n \r\n return types", "def test_typedef00202m_type_def00202m1_p(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/ElemDecl/typeDef/typeDef00202m/typeDef00202m.xsd\",\n instance=\"sunData/ElemDecl/typeDef/typeDef00202m/typeDef00202m1_p.xml\",\n class_name=\"Root\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )", "def defineType(name,numSites,bindsTo,symmetric,maxCount):\n\t\t\n\ttypePrimitive.append([name,numSites,bindsTo,symmetric,maxCount])\n\treturn", "def test_typedef00401m_type_def00401m1_p(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/ElemDecl/typeDef/typeDef00401m/typeDef00401m.xsd\",\n instance=\"sunData/ElemDecl/typeDef/typeDef00401m/typeDef00401m1_p.xml\",\n class_name=\"Root\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )", "def validVarConstructType(self,vartype):\r\n indArray = vartype.find('[]')\r\n if indArray>0:\r\n thisType = vartype[0:indArray]\r\n isArray = True\r\n else:\r\n thisType = vartype\r\n isArray = False\r\n \r\n if thisType in ('rng','range'):\r\n type = 'range'\r\n elif thisType in ('rate'):\r\n type = 'rate'\r\n elif thisType in ('amt','amount'):\r\n type = 'amount'\r\n elif thisType in ('minamt','minamount'):\r\n type = 'minamount'\r\n elif thisType in ('bool'):\r\n type = 'bool'\r\n else:\r\n print 'variable type must be range, rate, amount, minamount, bool (or abbreviated forms)'\r\n return False, ''\r\n \r\n return True, type, isArray", "def get_check_types():", "def gen_load_code(var_name: str, ty: type) -> str:\n\n tys = type_str(ty)\n\n if tys.startswith(\"Set[\"):\n assert tys.endswith(\"]\")\n inside = tys[4:-1]\n ans = f\"{var_name} = set(json.load(sys.stdin))) # convert set (stored as json dictionary)\"\n assertions = [f\"all(isinstance(x, {inside}) for x in {var_name})\"]\n else:\n ans = f\"{var_name} = json.load(sys.stdin)\"\n num_lists = tys.count(\"List[\")\n assert tys.startswith(\"List[\" * num_lists) and tys.endswith(\"]\" * num_lists)\n inside = tys[5 * num_lists: len(tys) - num_lists]\n if num_lists == 0:\n assertions = [f\"isinstance({var_name}, {inside})\"]\n else:\n assertions = [f\"isinstance({var_name}, list)\"]\n if num_lists == 1:\n assertions.append(f\"all(isinstance(x, {inside}) for x in {var_name})\")\n else:\n assertions.append(f\"all(isinstance(x, list) for x in {var_name})\")\n if num_lists == 2:\n assertions.append(f\"all(isinstance(y, {inside}) for x in {var_name} for y in x)\")\n elif num_lists == 3:\n assertions += [f\"all(isinstance(y, list) for x in {var_name} for y in x)\",\n f\"all(isinstance(z, {inside}) for x in {var_name} for y in x for z in y)\"]\n else:\n assert False, f'Unknown type {tys}'\n\n assert inside in [\"int\", 
\"float\", \"bool\", \"str\"], f'Unknown type {tys}'\n return ans + \"\\n\\n\" + \"\\n\".join(f\"assert {a}, 'Type error: expecting `{tys}`'\" for a in assertions)", "def ntypes(self) -> Sequence[str]:\n ntypes = list(self.num_nodes_dict.keys())\n return ntypes", "def test_typedef00203m_type_def00203m1_p(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/ElemDecl/typeDef/typeDef00203m/typeDef00203m.xsd\",\n instance=\"sunData/ElemDecl/typeDef/typeDef00203m/typeDef00203m1_p.xml\",\n class_name=\"Root\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )", "def getProposalTypesVocab(self):\n list = DisplayList()\n # Acquire the types\n types = self.aq_inner.aq_parent.getProposalTypes()\n for type in types:\n list.add(type, type)\n return list", "def display_file_types():\n\n print 'Available file types. Each line contains the file type and the list of extensions by those the file type is determined. To include FOOBAR file type to search use --FOOBAR, to exlude use --noFOOBAR. You can include and exclude a number of file types.'\n for ftype, extensions in TYPES().iteritems():\n print '%s: %s' % (ftype, ', '.join(extensions))", "def test_typedef00402m_type_def00402m1_p(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/ElemDecl/typeDef/typeDef00402m/typeDef00402m.xsd\",\n instance=\"sunData/ElemDecl/typeDef/typeDef00402m/typeDef00402m1_p.xml\",\n class_name=\"Root\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )", "def _infer_variable_types_from_data(raw_data):\n raise NotImplementedError()", "def getTypeDefinition(file, line, offset):\n args = {\"file\": file, \"line\": line, \"offset\": offset}\n response = send_request(\"typeDefinition\", args)\n return get_response_body(response)", "def test_typedef00301m_type_def00301m1_p(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/ElemDecl/typeDef/typeDef00301m/typeDef00301m.xsd\",\n instance=\"sunData/ElemDecl/typeDef/typeDef00301m/typeDef00301m1_p.xml\",\n class_name=\"Root\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )", "def prettyprint(self, _file):\n xstr = \"var \" + self.name + \" \" + self.type.desc()\n _file.write(xstr + \"\\n\")", "def regenerate_variables(self):\n\n # Let us not forget to remove fields that might be empty by now\n if hasattr(self, '_var_kinds'):\n for k in self._var_kinds:\n attrname = camel2underscores(k)\n try:\n delattr(self, attrname)\n except AttributeError:\n pass # The attribute may not have been set up yet\n\n _var_kinds = defaultdict(DictList)\n for k, v in self._var_dict.items():\n _var_kinds[v.__class__.__name__].append(v)\n\n for k in _var_kinds:\n attrname = camel2underscores(k)\n setattr(self, attrname, _var_kinds[k])\n\n self._var_kinds = _var_kinds", "def show_type(type_):\n click.echo(format_type(type_))", "def make_def_function_types(self):\n res = \"\"\n for node in self.description.definitions():\n if isinstance(node.type, pdl.TypeFunction):\n res += \"{} = {}\\n\".format(self.python_madz_deftypes + self.mangled_namespace + \"___\" + node.name, self.gen_type_string(node.type))\n\n return res", "def get_type_doc_name(type):\n name = type.name\n if type.is_simple:\n return _get_simple_type_mapping(name)\n elif type.is_enum:\n return '{0}.{1}'.format(get_package_name(name), get_enum_name(name))\n elif 
type.is_complex:\n return '{0}.{1}'.format(get_package_name(name), get_class_name(name))", "def number_types(corpus):\n number_of_types = len(set(corpus))\n return number_of_types", "def _get_types(self):\n types = {'word': [constants.PAD, constants.UNK],\n 'char': [constants.PAD, constants.UNK],\n 'tag': [constants.PAD],\n }\n\n for _, filepath in self.directory.items():\n if filepath is not None:\n conll_file = os.path.basename(filepath) # get name of conll file\n types['word'].extend(set(self.conll_parser.words(conll_file)))\n types['char'].extend(set(chain(*[list(w) for w in self.conll_parser.words(conll_file)])))\n types['tag'].extend(set([tag[-1] for tag in self.conll_parser.tagged_words(conll_file)]))\n\n # ensure that we have only unique types\n types['word'] = list(set(types['word']))\n types['char'] = list(set(types['char']))\n types['tag'] = list(set(types['tag']))\n\n return types", "def types():\n types = session.query(Type).all()\n return jsonify(types=[t.name for t in types])", "def printListOfCalibTypes (self) :\n print '\\nprintListOfCalibTypes(): list_of_clib_types:' #, self.list_of_clib_types\n for type in self.list_of_clib_types : print ' ', type", "def opinion_type_list():\n for type_ in orm.DataFlagOpinionType.select():\n click.echo(type_.name)", "def get_user_defined_types(self):\n query = mssqlqueries.get_user_defined_types()\n logger.info(u'UDTs query: %s', query)\n for tabular_result in self.execute_query(query):\n for row in tabular_result[0]:\n yield (row[0], row[1])", "def test_types(question):\n instance = question[\"instance\"]\n for name, data in instance.get(\"variables\", {}).items():\n assert \"optional\" not in data or isinstance(data[\"optional\"], bool)\n if data.get(\"type\") == \"boolean\":\n assert \"value\" not in data or isinstance(data[\"value\"], bool)\n elif data.get(\"type\") in [\"integer\", \"long\"]:\n assert \"value\" not in data or isinstance(data[\"value\"], int)", "def typedefs(self):\n raise exceptions.NotImplementedError()", "def _set_type_display_info(ctx):\n ctx.meta.type_display_name = ctx.ext.type_display_name = \"Simulation\"\n ctx.ext.type_sortkey = ctx.meta.type_sortkey = \"AC\"", "def test_typedef00601m_type_def00601m1_p(mode, save_output, output_format):\n assert_bindings(\n schema=\"sunData/ElemDecl/typeDef/typeDef00601m/typeDef00601m.xsd\",\n instance=\"sunData/ElemDecl/typeDef/typeDef00601m/typeDef00601m1_p.xml\",\n class_name=\"Root\",\n version=\"1.1\",\n mode=mode,\n save_output=save_output,\n output_format=output_format,\n structure_style=\"filenames\",\n )", "def _generate_type(self, n, modifiers=[], emit_declname = True):\n\t\ttyp = type(n)\n\n\t\t#~ print(n, modifiers)\n\n\t\tif typ == pycparser.c_ast.TypeDecl:\n\t\t\ts = ''\n\t\t\tif n.quals: s += ' '.join(n.quals) + ' '\n\t\t\ts += self.visit(n.type)\n\n\t\t\t# Local variables & parameter renaming.\n\t\t\t#\n\t\t\t# Variable name substitution only applies to local variables or parameters names within function prototypes\n\t\t\t# (thus, global variables and function names need to be excluded)\n\t\t\t#\n\t\t\t# case 1: level-0 function parameters (no remanimg for nested parameters)\n\t\t\t# case 2: local variable declaration (thus excluding functions, global vars, struct-enum-union fields, nested parameters)\n\t\t\t#\n\t\t\tif self.__visitingParam == 1: # case 1\n\t\t\t\tif self.__debug: print(\"SETTING NEWID for [%s,%s] (case I)\") % (self.__currentFunction,n.declname)\n\t\t\t\t#self.newIDs[self.__currentFunction,n.declname] = self.paramprefix + 
self.__currentFunction + '_'+self.inlineInfix #S:\n\t\t\t\tif (self.__currentFunction,n.declname) in self.newIDs:\n\t\t\t\t\tself.newIDs[self.__currentFunction,n.declname].append((self.paramprefix + self.__currentFunction + '_'+self.inlineInfix,self.__visitingCompound)) #S:\n\t\t\t\telse: \n\t\t\t\t\tself.newIDs[self.__currentFunction,n.declname] = [(self.paramprefix + self.__currentFunction + '_'+self.inlineInfix,self.__visitingCompound)]\n\t\t\t\tn.declname = (self.paramprefix + self.__currentFunction + '_' + self.inlineInfix + n.declname) if n.declname else '' #S:\n\t\t\t\n\t\t\telif (self.__visitingParam == 0 and # case 2\n\t\t\t\t\tself.__visitFuncDef == 0 and\n\t\t\t\t\tn.declname not in self.Parser.funcName and\n\t\t\t\t\t#n.declname not in self.Parser.varNames[''] and\n\t\t\t\t\tself.__currentFunction != '' and\n\t\t\t\t\tself.__visitStructUnionEnum == 0):\n\t\t\t\tif self.__debug: print(\"SETTING NEWID for [%s,%s] (case II)\") % (self.__currentFunction,n.declname)\n\t\t\t\t#S: env.local, the followin two lines are replaced with the following if\n\t\t\t\t#self.newIDs[self.__currentFunction,n.declname] = self.prefix + self.__currentFunction + '_'\n\t\t\t\t#n.declname = self.prefix + self.__currentFunction + '_' + n.declname if n.declname else ''\n\t\t\t\tif self.__init: \n\t\t\t\t\t#self.newIDs[self.__currentFunction,n.declname] = self.prefix + self.__currentFunction + '_' +self.inlineInfix #S:\n\t\t\t\t\tif (self.__currentFunction,n.declname) in self.newIDs:\n\t\t\t\t\t\tself.newIDs[self.__currentFunction,n.declname].append((self.prefix + self.__currentFunction + '_' +self.inlineInfix,self.__visitingCompound)) #S:\n\t\t\t\t\telse: \n\t\t\t\t\t\tself.newIDs[self.__currentFunction,n.declname] = [(self.prefix + self.__currentFunction + '_' +self.inlineInfix,self.__visitingCompound)]\n\t\t\t\t\tn.declname = self.prefix + self.__currentFunction + '_' + self.inlineInfix + n.declname if n.declname else '' #S:\n\t\t\t\telse:\n\t\t\t\t\t#self.newIDs[self.__currentFunction,n.declname] = self.nondetprefix + self.__currentFunction + '_' +self.inlineInfix #S:\n\t\t\t\t\tif (self.__currentFunction,n.declname) in self.newIDs:\n\t\t\t\t\t\tself.newIDs[self.__currentFunction,n.declname].append((self.nondetprefix + self.__currentFunction + '_' +self.inlineInfix,self.__visitingCompound)) #S:\n\t\t\t\t\telse:\n\t\t\t\t\t\tself.newIDs[self.__currentFunction,n.declname] = [(self.nondetprefix + self.__currentFunction + '_' +self.inlineInfix,self.__visitingCompound)]\n\t\t\t\t\tn.declname = self.nondetprefix + self.__currentFunction + '_' + self.inlineInfix + n.declname if n.declname else '' #S:\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t#print n.declname\n\t\t\t\t#print self.newIDs\n\t\n\n\t\t\tnstr = n.declname if n.declname else ''\n\n\t\t\t# Resolve modifiers.\n\t\t\t# Wrap in parens to distinguish pointer to array and pointer to\n\t\t\t# function syntax.\n\t\t\t#\n\t\t\tfor i, modifier in enumerate(modifiers):\n\t\t\t\tif isinstance(modifier, pycparser.c_ast.ArrayDecl):\n\t\t\t\t\tif (i != 0 and isinstance(modifiers[i - 1], pycparser.c_ast.PtrDecl)):\n\t\t\t\t\t\tnstr = '(' + nstr + ')'\n\t\t\t\t\tnstr += '[' + self.visit(modifier.dim) + ']'\n\t\t\t\telif isinstance(modifier, pycparser.c_ast.FuncDecl):\n\t\t\t\t\tif (i != 0 and isinstance(modifiers[i - 1], pycparser.c_ast.PtrDecl)):\n\t\t\t\t\t\tnstr = '(' + nstr + ')'\n\t\t\t\t\tnstr += '(' + self.visit(modifier.args) + ')'\n\t\t\t\telif isinstance(modifier, pycparser.c_ast.PtrDecl):\n\t\t\t\t\tif modifier.quals:\n\t\t\t\t\t\tnstr = '* %s %s' % (' 
'.join(modifier.quals), nstr)\n\t\t\t\t\telse:\n\t\t\t\t\t\tnstr = '*' + nstr\n\t\t\tif nstr: s += ' ' + nstr\n\t\t\treturn s\n\t\telif typ == pycparser.c_ast.Decl:\n\t\t\treturn self._generate_decl(n.type)\n\t\telif typ == pycparser.c_ast.Typename:\n\t\t\treturn self._generate_type(n.type)\n\t\telif typ == pycparser.c_ast.IdentifierType:\n\t\t\treturn ' '.join(n.names) + ' '\n\t\telif typ in (pycparser.c_ast.ArrayDecl, pycparser.c_ast.PtrDecl, pycparser.c_ast.FuncDecl):\n\t\t\treturn self._generate_type(n.type, modifiers + [n])\n\t\telse:\n\t\t\treturn self.visit(n)\n\n\n\n\t\tdef visit_Compound(self, n):\n\t\t\tself.__visitingCompound += 1\n\t\t\ts = super(self.__class__, self).visit_Compound(n)\n\t\t\tfor key in self.newIDs: #S: remove pairs that have been added in this compound\n\t\t\t\tstack = self.newIDs[key] \n\t\t\t\tif stack and stack[-1][1] == self.__visitingCompound: \n\t\t\t\t\tstack.pop()\n\t\t\tself.__visitingCompound -= 1\n\t\t\treturn s", "def testTypeProperties(self):\n cmisClient = CmisClient(self.url, self.user, self.pwd,\n binding=self.binding,\n **self.ext_args)\n repo = cmisClient.getDefaultRepository()\n docTypeDef = repo.getTypeDefinition('cmis:document')\n assert 'cmis:document' == docTypeDef.getTypeId()\n props = docTypeDef.getProperties().values()\n assert len(props) > 0\n for prop in props:\n if prop.queryable:\n assert prop.queryName\n assert prop.propertyType", "def _get_python_prop_type(prop_type: Type[Variable]) -> str:\n if prop_type is VariableBool:\n return \"bool\"\n if prop_type in (VariableInt, VariableUInt):\n return \"int\"\n if prop_type is VariableFloat:\n return \"float\"\n if prop_type is VariableString:\n return \"bytes\"\n if prop_type is VariableVec2:\n return \"(float, float)\"\n if prop_type is VariableStruct:\n return \"dict[str, Variable]\"\n if prop_type is VariableArray:\n return \"MutableSequence\"\n raise TypeError(\"unexpected variable type\")", "def genType(self, typeinfo, name, alias):\n OutputGenerator.genType(self, typeinfo, name, alias)\n\n typeElem = typeinfo.elem\n # If the type is a struct type, traverse the embedded <member> tags\n # generating a structure. Otherwise, emit the tag text.\n category = typeElem.get('category')\n\n # Add a typeCategory{} entry for the category of this type.\n self.addName(self.typeCategory, name, category)\n\n if category in ('struct', 'union'):\n self.genStruct(typeinfo, name, alias)\n else:\n if alias:\n # Add name -> alias mapping\n self.addName(self.alias, name, alias)\n\n # Always emit an alias (?!)\n count = 1\n\n # May want to only emit full type definition when not an alias?\n else:\n # Extract the type name\n # (from self.genOpts). Copy other text through unchanged.\n # If the resulting text is an empty string, do not emit it.\n count = len(noneStr(typeElem.text))\n for elem in typeElem:\n count += len(noneStr(elem.text)) + len(noneStr(elem.tail))\n\n if count > 0:\n if category == 'bitmask':\n requiredEnum = typeElem.get('requires')\n self.addName(self.flags, name, requiredEnum)\n\n # This happens when the Flags type is defined, but no\n # FlagBits are defined yet.\n if requiredEnum is not None:\n self.addMapping(name, requiredEnum)\n elif category == 'enum':\n # This case does not seem to come up. 
It nominally would\n # result from\n # <type name=\"Something\" category=\"enum\"/>,\n # but the output generator does not emit them directly.\n self.logMsg('warn', 'ScriptOutputGenerator::genType: invalid \\'enum\\' category for name:', name)\n elif category == 'funcpointer':\n self.funcpointers[name] = None\n elif category == 'handle':\n self.handles[name] = None\n elif category == 'define':\n self.defines[name] = None\n elif category == 'basetype':\n # Do not add an entry for base types that are not API types\n # e.g. an API Bool type gets an entry, uint32_t does not\n if self.apiName(name):\n self.basetypes[name] = None\n self.addName(self.typeCategory, name, 'basetype')\n else:\n self.logMsg('diag', 'ScriptOutputGenerator::genType: unprocessed type:', name, 'category:', category)\n else:\n self.logMsg('diag', 'ScriptOutputGenerator::genType: unprocessed type:', name)", "def render_subtypes(spec_catalog, data_type, prefix=None):\n subtypes = spec_catalog.get_subtypes(data_type)\n if len(subtypes) == 0:\n return None\n re = prefix if prefix is not None else \"\"\n re += \", \".join([RSTDocument.get_reference(RSTSectionLabelHelper.get_section_label(ct), ct)\n for ct in subtypes])\n return re", "def type(name):", "def buildTypedefXmlDeclaration(self, title=0, offset=1, size=None):\n dec = \"<Paragraph>\"\n if len(self.name) == 0:\n dec = \"Aliases an anonymous enumeration.\"\n else:\n dec = \"Aliases an enumeration, called: '\"+self.name+\"'.\"\n dec += \"</Paragraph>\\n\"\n dec += \"<Paragraph>\"+self.info+\"</Paragraph>\\n\"\n dec += \"<table border=1 cellpadding=5>\\n\"\n for entry in self.entries:\n dec += entry.buildFullInfoDeclaration()\n dec += \"</table>\\n\"\n return dec", "def listFeaturableContentTypes():", "def types():\n sql = \"\"\"SELECT DISTINCT sample_type\n FROM barcodes.sample\n ORDER BY sample_type\"\"\"\n with pm.sql.TRN:\n pm.sql.TRN.add(sql)\n return pm.sql.TRN.execute_fetchflatten()", "def typedef(typedefs):\n\n\n for d in typedefs:\n\n\n type = map_type(d[\"type\"])\n typedef = d[\"typedef\"]\n\n MAPPINGS[typedef] = type", "def getPrettyType(self):\n s = self.sym\n if self.sym == None:\n s = self.define\n return \"Typedef (alias type: %s)\" % s.getType()", "def describe_type(group, type_counter, describe=True):\n docs = conll04_parser.get_docs(group)\n count = {}\n for document in docs:\n cnt = type_counter(document)\n for key in cnt:\n if key not in count:\n count[key] = cnt[key]\n else:\n count[key] += cnt[key]\n if describe:\n print(\"Description of type in\", group)\n print(\"Total:\", sum(count.values()))\n for key in count:\n print(key, \":\", count[key])\n sns.barplot(list(count.values()), list(count.keys()))\n plt.show()\n # Return a map from entities to corresponding encoding numbers\n return dict(zip(count.keys(), range(len(count))))", "def magic_whos(self, parameter_s=''):\n \n varnames = self.magic_who_ls()\n if not varnames:\n print 'Interactive namespace is empty.'\n return\n\n # if we have variables, move on...\n\n # for these types, show len() instead of data:\n seq_types = [types.DictType,types.ListType,types.TupleType]\n\n # Find all variable names and types so we can figure out column sizes\n get_vars = lambda i: self.locals[i]\n type_name = lambda v: type(v).__name__\n varlist = map(get_vars,varnames)\n typelist = map(type_name,varlist)\n # column labels and # of spaces as separator\n varlabel = 'Variable'\n typelabel = 'Type'\n datalabel = 'Data/Length'\n colsep = 3\n # find the size of the columns to format the output nicely\n varwidth = 
max(max(map(len,varnames)), len(varlabel)) + colsep\n typewidth = max(max(map(len,typelist)), len(typelabel)) + colsep\n # table header\n print varlabel.ljust(varwidth) + typelabel.ljust(typewidth) + \\\n datalabel+'\\n' + '-'*(varwidth+typewidth+len(datalabel))\n # and the table itself\n for vname,var,vtype in zip(varnames,varlist,typelist):\n print itpl(\"$vname.ljust(varwidth)$vtype.ljust(typewidth)\"),\n if vtype in seq_types:\n print len(var)\n else:\n vstr = str(var)\n if len(vstr) < 50:\n print vstr\n else:\n printpl('$vstr[:20]<...>$vstr[-20:]')", "def etypes(self): # -> list[str]:\n ...", "def readOtherTypes(self):\n types = {}\n for m in re.finditer(\"TYPE (\\w*) = (.*);\", self.data):\n typename, type_string = m.groups() \n if typename not in self.types.keys():\n types[typename] = type_string\n \n return types", "def schema_view(request):\n generator = schemas.SchemaGenerator(title='Experiment Data Depot')\n return response.Response(generator.get_schema(request=request))", "def types(self, lang, vocid=None):\n\n if vocid is not None:\n url = self.api_base + vocid + '/types'\n else:\n url = self.api_base + 'types'\n payload = {'lang': lang}\n req = requests.get(url, params=payload)\n req.raise_for_status()\n return req.json()['types']", "def print_datatypes(model: nn.Module, model_name: str, sep: str = \"\\n\") -> None:\n log = model_name + \"'s datatypes:\" + sep\n log += sep.join(str(t) for t in model_utils.get_model_tensor_datatype(model))\n logger.info(log)", "def output_meta_types(self, inputs=None):\n raise NotImplementedError()", "def TypeSpecs(self) -> Dict[str, tf.TypeSpec]:\n return self._type_specs", "def compile_var_dec(self):\n\n\t\txml = '<varDec>\\n' + self.tokenizer.keyword()\n\t\t\n\t\tif self.tokenizer.get_token() in ['int', 'boolean', 'char']:\n\t\t\txml += self.tokenizer.keyword()\t\n\t\telse: \n\t\t\txml += self.tokenizer.identifier()\n\n\t\txml += self.tokenizer.identifier()\n\n\t\tself.outfile.write(xml)\n\n\t\twhile self.tokenizer.get_token() == ',':\n\t\t\txml = self.tokenizer.symbol() + self.tokenizer.identifier()\n\t\t\tself.outfile.write(xml)\n\n\t\tself.outfile.write(self.tokenizer.symbol() + '</varDec>\\n')\n\n\t\tif self.tokenizer.get_token() == 'var':\n\t\t\tself.compile_var_dec()", "def DocumentType(self, default=[None]):\n return self.data.get('metadata', {}).get('document_type', default)", "def __repr__(self):\n return ''.joint(\"A corpus made of \", len(self._corpus),\n \" with types: \", [type(item) for item in self._corpus])", "def _CreateSchemas(self) -> None:\n self.schema_objs = dict() # Holds OpenAPI representations of types.\n\n # Add the OpenAPI schemas of protobuf primitive types.\n primitive_type_schemas = {\n primitive_type[\"name\"]: primitive_type[\"schema\"]\n for primitive_type in primitive_types.values()\n }\n self.schema_objs.update(\n cast(Dict[str, Dict[str, str]], primitive_type_schemas))\n # Add the OpenAPI schemas of the statically described RDF types.\n self.schema_objs.update(rdf_type_schemas)\n\n # Holds state of type extraction (white/gray nodes).\n visiting: Set[str] = set()\n self._CreateRouterMethodSchemas(visiting)\n self._CreateFlowSchemas(visiting)", "def visit_Typedef(self, node):\n return str_node(node)", "def type_count():\n types = []\n for typ in Statistics.all_type():\n types.append({'label': typ.lower(), 'y': Statistics.type_count(typ)})\n fix_types = []\n for i in sorted(types, key=lambda k: k['y']):\n if i['y'] != 0:\n fix_types.append(i)\n return jsonify(result=fix_types)" ]
[ "0.61465466", "0.60761213", "0.5973325", "0.5913389", "0.5776444", "0.5673241", "0.56731915", "0.56657207", "0.56385726", "0.5612472", "0.55503535", "0.5506323", "0.5504591", "0.5486256", "0.5480814", "0.5403671", "0.54033583", "0.54031324", "0.5394926", "0.53930014", "0.53302836", "0.52671677", "0.5250262", "0.52499616", "0.5247999", "0.52394456", "0.52360415", "0.52296555", "0.5216996", "0.52139217", "0.52107847", "0.5209083", "0.5205162", "0.520502", "0.5202873", "0.51913846", "0.5179961", "0.51741153", "0.5165966", "0.5165379", "0.515366", "0.5152519", "0.5143899", "0.5137893", "0.51284724", "0.51230645", "0.51151955", "0.51065624", "0.5076497", "0.5076259", "0.5070854", "0.50633675", "0.50633353", "0.5056696", "0.50527465", "0.5049959", "0.5047629", "0.5027842", "0.50219816", "0.5014788", "0.5009459", "0.5006782", "0.50025874", "0.50020075", "0.49819207", "0.4978774", "0.49757254", "0.49712273", "0.4963506", "0.49572623", "0.49563614", "0.4952992", "0.49494106", "0.49427336", "0.49405622", "0.49394006", "0.49330324", "0.4924349", "0.4915003", "0.491163", "0.49013692", "0.4893269", "0.48898703", "0.488943", "0.48894176", "0.48885605", "0.488583", "0.4878907", "0.48748565", "0.48695874", "0.48664156", "0.48601428", "0.48519468", "0.48513877", "0.48477814", "0.48477185", "0.4841967", "0.4838374", "0.482926", "0.48246005" ]
0.6633607
0
Main method of the command.
def handle(self, *args, **options):
    self.create_indices()
    self.bulk()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def main(args):", "def main(args):", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():\n pass", "def main(self) -> None:\n pass", "def main():\n\tcli = Cli()\n\tcli.run()", "def main(args=None):", "def main(args=None):", "def cli() -> None:", "def cli() -> None:", "def cli():\r\n pass", "def main():\n pass", "def cli():\n\n pass", "def cli(ctx):", "def cli(ctx):", "def main():\n return", "def main() -> None:", "def main() -> None:", "def main() -> None:", "def main() -> None:", "def main(ctx, verbose):\n return", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main(self):\r\n pass", "def main_cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass", "def cli():\n pass" ]
[ "0.78725755", "0.78725755", "0.78080964", "0.78080964", "0.78080964", "0.78080964", "0.78080964", "0.78080964", "0.78080964", "0.78080964", "0.78080964", "0.78080964", "0.78080964", "0.78080964", "0.78080964", "0.78080964", "0.78080964", "0.78080964", "0.78080964", "0.78080964", "0.78080964", "0.78080964", "0.78080964", "0.78080964", "0.78080964", "0.78080964", "0.78080964", "0.78080964", "0.78080964", "0.78080964", "0.77995914", "0.77431893", "0.771157", "0.7703083", "0.7703083", "0.76762205", "0.76762205", "0.765161", "0.75784516", "0.75681025", "0.75480926", "0.75480926", "0.7545095", "0.75408196", "0.75408196", "0.75408196", "0.75408196", "0.7498457", "0.7490335", "0.7490335", "0.7490335", "0.7490335", "0.7490335", "0.7490335", "0.7490335", "0.7490335", "0.7490335", "0.7490335", "0.7490335", "0.7490335", "0.7490335", "0.7490335", "0.7490335", "0.7490335", "0.7490335", "0.7490335", "0.7490335", "0.7490335", "0.7490335", "0.7490335", "0.74803215", "0.74622786", "0.74506074", "0.74506074", "0.74506074", "0.74506074", "0.74506074", "0.74506074", "0.74506074", "0.74506074", "0.74506074", "0.74506074", "0.74506074", "0.74506074", "0.74506074", "0.74506074", "0.74506074", "0.74506074", "0.74506074", "0.74506074", "0.74506074", "0.74506074", "0.74506074", "0.74506074", "0.74506074", "0.74506074", "0.74506074", "0.74506074", "0.74506074", "0.74506074", "0.74506074" ]
0.0
-1
Create needed indices in Elasticsearch.
def create_indices(self) -> None:
    self.client.indices.create(
        index="business",
        body=BUSINESS_MAPPINGS
    )
    self.client.indices.create(
        index="review",
        body=REVIEW_MAPPINGS
    )
    self.client.indices.create(
        index="tip",
        body=TIP_MAPPINGS
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_indices():\n destroy_indices()\n\n ActionDocument._index.create(ignore=[400, 404])\n ClassificationDocument._index.create(ignore=[400, 404])\n FunctionDocument._index.create(ignore=[400, 404])\n PhaseDocument._index.create(ignore=[400, 404])\n RecordDocument._index.create(ignore=[400, 404])\n\n yield\n\n destroy_indices()", "def setup(self):\n collection = self._get_collection()\n\n indices = copy(self.params[\"indices\"])\n\n if \"when\" not in indices:\n indices[\"when\"] = {}\n\n for index in indices:\n self.log(DEBUG, \"Ensuring we have index for {}\".format(index))\n\n options = indices[index]\n collection.create_index(index, *options)\n self.log(DEBUG, \"Done.\")", "def _create_indices(cls):\r\n from thunderdome.connection import _hosts, _index_all_fields, create_key_index\r\n \r\n if not _hosts: return\r\n for column in cls._columns.values():\r\n if column.index or _index_all_fields:\r\n create_key_index(column.db_field_name)", "def create_indexes(self) -> None:\n self.collection.create_index(\"traceId\")\n self.collection.create_index(\"process.serviceName\")", "def create_index():\n es = Elasticsearch()\n es.indices.delete(index='drugaid-index')\n with open('../data_crawling/demo_drug_data-fixed.json') as f:\n drug_data = json.load(f)\n\n for drug in drug_data:\n es.index(index='drugaid-index', doc_type='drug', body=drug)", "def create_index():", "def create_index(self):\n self.send_robust(self.es_index, data=self.es_meta)\n self.set_index_normal_settings()", "def _es_push_indexes(self, content):\n for c in self.es_clients:\n c.create_index(content)", "def _create_indexes(self):\r\n # WARNING: The collection will be locked during the index\r\n # creation. If the collection has a large number of\r\n # documents in it, the operation can take a long time.\r\n\r\n # TODO: The creation of indexes can be moved to a Django\r\n # management command or equivalent. 
There is also an option to\r\n # run the indexing on the background, without locking.\r\n self.collection.ensure_index([('time', pymongo.DESCENDING)])\r\n self.collection.ensure_index('event_type')", "def handle(self, *args, **options):\n self.create_indices()\n self.bulk()", "def setUp(self):\n body = {\n \"settings\": {\n \"number_of_shards\": 1,\n \"number_of_replicas\": 0\n }\n }\n self.assertTrue(self.es.create_index('contacts_esclient_test', body))\n self.assertFalse(self.es.create_index('contacts_esclient_test', body))\n\n self.assertTrue(self.es.create_index('contacts_esclient_test2', body))\n self.assertFalse(self.es.create_index('contacts_esclient_test2', body))\n\n\n \"\"\" Index some test data \"\"\"\n data = {\"name\": \"Joe Tester\",\"age\": 21, \"sex\": \"male\"}\n self.assertTrue(self.es.index(\"contacts_esclient_test\", \"person\", body=data,\n docid=1))\n data = {\"name\": \"Joe Schmoe\",\"age\": 17, \"sex\": \"male\"}\n self.assertTrue(self.es.index(\"contacts_esclient_test\", \"person\", body=data,\n docid=2))\n\n self.assertTrue(self.es.refresh('contacts_esclient_test'))", "def create_index(self):\n\n indice = client.IndicesClient(self.es)\n\n if not indice.exists(self.es_main_index):\n indice.create(\n index=self.es_main_index\n )\n\n return True", "def create_index(self):\r\n\r\n #### Begin functionality here\r\n\r\n return()", "def init(self):\n self._es.create_index_template(\n name=DATASETS_INDEX_NAME,\n template=DATASETS_INDEX_TEMPLATE,\n force_recreate=True,\n )\n self._es.create_index(DATASETS_INDEX_NAME)", "def createall_main(graph):\n parser = ArgumentParser()\n parser.add_argument(\"--only\", action=\"append\")\n parser.add_argument(\"--skip\", action=\"append\")\n parser.add_argument(\"-D\", \"--drop\", action=\"store_true\")\n args = parser.parse_args()\n\n graph.elasticsearch_index_registry.createall(\n force=args.drop,\n only=args.only,\n skip=args.skip,\n )", "def create_index(index_name):\n resp = es.indices.create(index=index_name)\n print(resp)", "def create_index(self):\n if self.index_exists():\n logger.info('Index {} already exists'.format(self.index_name))\n logger.info('Deleting existing index')\n self.indices_client.delete(index=self.index_name)\n self.create_index_if_not_exist()", "def create_index(es_object, index_name):\n created = False\n # index settings\n # the reason why we need mappings is avoid corrupting your data\n settings = {\n \"settings\": {\n \"number_of_shards\": 1,\n \"number_of_replicas\": 0\n },\n \"mappings\": {\n # custom type called foods\n \"foods\": {\n \"properties\": {\n # Specify that the food_name field contains text values.\n \"food_name\": {\n \"type\": \"text\",\n \"fields\": {\n \"raw\":{ \n \"type\": \"keyword\"\n } # The food_name.raw field can be used for sorting and aggregations\n }\n },\n # Specify that the categories field contains text values.\n \"categories\": {\n \"type\": \"text\",\n \"fields\": {\n \"raw\":{ \n \"type\": \"keyword\"\n } # The categories.raw field can be used for sorting and aggregations\n }\n },\n # Specify that the calories field contains integer values.\n \"calories\": {\n \"type\": \"integer\"\n },\n \"protein\": {\n \"type\": \"integer\"\n },\n \"carbs\": {\n \"type\": \"integer\"\n },\n \"fat\": {\n \"type\": \"integer\"\n }\n \n },\n }\n }\n }\n try:\n if not es_object.indices.exists(index_name):\n # Ignore 400 means to ignore \"Index Already Exist\" error.\n es_object.indices.create(index=index_name, ignore=400, body=settings)\n print('Created Index')\n created = True\n except 
Exception as ex:\n print(str(ex))\n finally:\n return created", "def configure_index(client):\n index_name = client.index + \"-\" + _random_id()\n mappings = ANNOTATION_MAPPING\n\n if client.server_version < Version(\"7.0.0\"):\n mappings = {client.mapping_type: mappings}\n\n client.conn.indices.create(\n index_name,\n body={\n \"mappings\": mappings,\n \"settings\": {\"analysis\": ANALYSIS_SETTINGS},\n },\n )\n\n return index_name", "def createindexes():\n index = [{}, {}, {}, {}]\n readcorpus(index)\n buildindex4(index[2], index[3])\n writeindextofile(index)\n return index", "def build_index(self):\n self.rebuild_index()", "def create_index(es_object, index_name):\n created = False\n \"\"\" index settings \"\"\"\n settings = {\n \"settings\": {\n \"number_of_shards\": 1,\n \"number_of_replicas\": 0\n },\n \"mappings\": {\n \"physicians\": {\n \"dynamic\": \"strict\",\n \"properties\": {\n \"overview\": {\n \"type\": \"text\"\n },\n \"full_name\": {\n \"type\": \"text\"\n },\n \"years_of_practice\": {\n \"type\": \"text\"\n },\n \"language\": {\n \"type\": \"text\"\n },\n \"office_location\": {\n \"type\": \"text\"\n },\n \"hospital_affiliation\": {\n \"type\": \"text\"\n },\n \"specialties\": {\n \"type\": \"text\"\n },\n \"education_and_medical_training\": {\n \"type\": \"text\"\n },\n \"certification_and_licensure\": {\n \"type\": \"text\"\n },\n }\n }\n }\n }\n\n try:\n if not es_object.indices.exists(index_name):\n # Ignore 400 means to ignore \"Index Already Exist\" error.\n es_object.indices.create(index=index_name, ignore=400, body=settings)\n print('Created Index')\n created = True\n except Exception as ex:\n print(str(ex))\n finally:\n return created", "def create_indexes(create_func):\n\tfor set_name, index_path, index_name in zip(SET_NAMES, INDEX_PATHS, INDEX_NAMES):\n\t\tcreate_func(set_name, index_path, index_name)", "def build_index():\n pass", "def es_index(data):\n doc_type = data.get('service')\n es.index(index=INDEX, doc_type=doc_type, body=data)", "def __create_new_index(self, index, index_id, begin_timestamp):\n index_name = index + \"_\" + str(index_id)\n target_config = index + \".target_config\"\n pipeline_id = index + \".pipeline\"\n mapping = {\n \"user_id\": {\"type\": \"long\"},\n \"client_id\": {\"type\": \"long\"},\n \"partner_id\": {\"type\": \"long\"},\n \"module\": {\"type\": \"keyword\"},\n \"page\": {\"type\": \"keyword\"},\n \"uri\": {\"type\": \"keyword\"},\n \"app_type\": {\"type\": \"keyword\"},\n \"created_at\": {\"type\": \"date\"},\n \"request_time\": {\"type\": \"date\"},\n \"duration\": {\"type\": \"long\"},\n }\n body = {\n \"settings\": {\n \"number_of_shards\": 1,\n \"number_of_replicas\": 1,\n },\n \"mappings\": {\n \"docs\": {\n \"properties\": mapping\n }\n },\n }\n self.client.indices.create(index=index_name, body=body)\n self.client.index(\n index=target_config,\n doc_type=\"config\",\n id=index_id,\n body={\"index_id\": index_id, \"begin_timestamp\": begin_timestamp},\n )\n\n return index_id", "def create_index(self, index_name, body):\n if self.es.indices.exists(index_name):\n print(\"deleting '%s' index...\" % index_name)\n res = self.es.indices.delete(index=index_name)\n print(\" response: '%s'\" % res)\n\n print(\"creating '%s' index...\" % index_name)\n res = self.es.indices.create(index=index_name, body=body)\n print(\" response: '%s'\" % res)", "def _set_es_workers(self, **kwargs):\n def make_es_worker(search_conn, es_index, es_doc_type, class_name):\n \"\"\"\n Returns a new es_worker instance\n\n args:\n -----\n search_conn: the 
connection to elasticsearch\n es_index: the name of the elasticsearch index\n es_doc_type: the name of the elasticsearch doctype\n class_name: name of the rdf class that is being indexed\n \"\"\"\n new_esbase = copy.copy(search_conn)\n new_esbase.es_index = es_index\n new_esbase.doc_type = es_doc_type\n log.info(\"Indexing '%s' into ES index '%s' doctype '%s'\",\n class_name.pyuri,\n es_index,\n es_doc_type)\n return new_esbase\n\n def additional_indexers(rdf_class):\n \"\"\"\n returns additional classes to index based off of the es definitions\n \"\"\"\n rtn_list = rdf_class.es_indexers()\n rtn_list.remove(rdf_class)\n return rtn_list\n\n\n self.es_worker = make_es_worker(self.search_conn,\n self.es_index,\n self.es_doc_type,\n self.rdf_class.__name__)\n if not kwargs.get(\"idx_only_base\"):\n self.other_indexers = {item.__name__: make_es_worker(\n self.search_conn,\n item.es_defs.get('kds_esIndex')[0],\n item.es_defs.get('kds_esDocType')[0],\n item.__name__)\n for item in additional_indexers(self.rdf_class)}\n else:\n self.other_indexers = {}", "def prepare_environment():\n elastic_search = Elasticsearch('{}:{}'.format(\n _CONFIG.elastic.elastic_hostname,\n _CONFIG.elastic.elastic_port))\n try:\n if not elastic_search.indices.exists(_CONFIG.elastic.elastic_index):\n elastic_search.indices.create(\n index=_CONFIG.elastic.elastic_index,\n body=_CONFIG.elastic.metadata_index_setup)\n except ConnectionError:\n sys.exit(\"Can't start because of no connection to ElasticSearch.\")", "def _initIndexes(self):\n class Record:\n \"\"\" a moron simple object for carrying the 'extra'-payload to index\n constructors\n \"\"\"\n def __init__(self, **kw):\n self.__dict__.update(kw)\n\n addIndex = self.addIndex\n addColumn = self.addColumn\n\n # Content indexes\n self._catalog.indexes.clear()\n for (index_name, index_type, extra) in self.enumerateIndexes():\n if extra is None:\n addIndex( index_name, index_type)\n else:\n if isinstance(extra, StringTypes):\n p = Record(indexed_attrs=extra)\n elif isinstance(extra, DictType):\n p = Record(**extra)\n else:\n p = Record()\n addIndex( index_name, index_type, extra=p )\n\n # Cached metadata\n self._catalog.names = ()\n self._catalog.schema.clear()\n for column_name in self.enumerateColumns():\n addColumn( column_name )", "def es():\n # pylint: disable=invalid-name\n\n client = Elasticsearch(ES_TEST_HOSTS)\n client.indices.create(index=ES_TEST_INDEX)\n yield client\n client.indices.delete(index=ES_TEST_INDEX)", "def create_index(cls, engine):\n\n reg_imei = db.Index('reg_imei_index', cls.imei, postgresql_concurrently=True)\n reg_imei.create(bind=engine)\n\n reg_normalized_imei = db.Index('reg_normalized_imei_index', cls.normalized_imei, postgresql_concurrently=True)\n reg_normalized_imei.create(bind=engine)", "def create_elastic_indices():\n # initial values\n alpha, alpha2, sigma = 10, 10, 50\n shape = (96, 288) # same as shape of input images\n x_mesh, y_mesh = np.meshgrid(np.arange(shape[1]), np.arange(shape[0]))\n\n # below is used once per epoch for the elastic deformation\n g_1d = signal.gaussian(300, sigma)\n kernel_deform = np.outer(g_1d, g_1d)\n dx = signal.fftconvolve(np.random.rand(*shape) * 2 - 1, kernel_deform, mode='same')\n dy = signal.fftconvolve(np.random.rand(*shape) * 2 - 1, kernel_deform, mode='same')\n dx = alpha * (dx - np.mean(dx)) / np.std(dx)\n dy = alpha2 * (dy - np.mean(dy)) / np.std(dy)\n indices_x, indices_y = x_mesh + dx, y_mesh + dy\n indices_x_clipped = np.clip(indices_x, a_min=0, a_max=shape[1] - 1)\n indices_y_clipped = 
np.clip(indices_y, a_min=0, a_max=shape[0] - 1)\n return indices_x_clipped, indices_y_clipped", "def get_indices():\n indices = requests.get(\"http://\"+ __srchost__ + \":9200/_stats\").json()['_all']['indices'].keys()\n return indices", "def _ensure_es_index(self, index):\n if not self.elasticsearch.indices.exists(index):\n try:\n self.elasticsearch.indices.create(index=index)\n except TransportError as error_msg:\n self.logger.error(str(error_msg.error))\n return False\n self.logger.info('Created Index: %s', index)\n\n return True", "def setup_mapping():\n # Mapping describes how elasticsearch handles a document during indexing.\n # Most fields are detected and mapped automatically.\n m = {\n # Turn off analysis on name so we can sort by it.\n 'name': {'index': 'not_analyzed', 'type': 'string'},\n }\n es = elasticutils.get_es()\n try:\n es.create_index_if_missing(settings.ES_INDEX)\n es.put_mapping(Addon._meta.app_label, {'properties': m},\n settings.ES_INDEX)\n except pyes.ElasticSearchException:\n pass", "def setup_index_shards_per_node(cluster: str, index: str, number: int):\n\n elastic = sreElastic(host=cluster)\n elastic.set_number_shards_per_node(index=index, number=number)", "def indexDocsToES(indexName):\n docs = pd.DataFrame()\n try:\n docs = pd.read_csv(\"documents/full_news_documents.csv\")\n except FileNotFoundError:\n print(\"File not found\")\n\n use_these_keys = ['id', 'category', 'headline', 'authors', 'link', 'short_description', 'date', 'body']\n helpers.bulk(es_client, doc_generator(docs, indexName, use_these_keys))", "def create(\n self,\n index: IO,\n request_options: Optional[_models.RequestOptions] = None,\n *,\n content_type: str = \"application/json\",\n **kwargs: Any\n ) -> _models.SearchIndex:", "def build_index(self):\n \n \n geoids = self.partitions.find_or_new(table='facilities_geoids')\n addresses = self.partitions.find_or_new(table='facilities_addresses')\n facilities = self.partitions.find(table='facilities')\n \n facilities.attach(addresses,'addresses')\n facilities.attach(geoids,'geoids')\n \n q = \"\"\"\n SELECT year, type, oshpd_id, facility_name, dba_city, dba_zip_code, blockgroup_gvid, tract_gvid, county_gvid\n FROM facilities\n JOIN geoids.facilities_geoids AS geoids ON geoids.facilities_id = facilities.id\n JOIN addresses.facilities_addresses AS addresses ON addresses.facilities_id = facilities.id\n \"\"\"\n \n p = self.partitions.find_or_new(table='facilities_index')\n p.clean()\n lr = self.init_log_rate()\n \n with p.inserter() as ins:\n for row in facilities.query(q):\n ins.insert(row)\n lr(str(p.identity))", "def rebuild_all_indexes():\n response = _get_lambda_client().invoke(\n FunctionName=indexer_function_name,\n InvocationType=\"Event\",\n )", "def create_index(\n self, index: str, force_recreate: bool = False, mappings: Dict[str, Any] = None\n ):\n if force_recreate:\n self.delete_index(index)\n if not self.index_exists(index):\n self.__client__.indices.create(\n index=index, body={\"mappings\": mappings or {}}\n )", "def _SetupIndexes(self, _open=open):\n pass", "def publish_impl(self) -> None:\n\n LOGGER.warn('ElasticsearchPublisher is being deprecated in favor of using SearchMetadatatoElasticasearchTask\\\n which publishes ES metadata with mappings compatible with amundsensearch >= 4.0.0')\n\n actions = [json.loads(line) for line in self.file_handler.readlines()]\n # ensure new data exists\n if not actions:\n LOGGER.warning(\"received no data to upload to Elasticsearch!\")\n return\n\n # Convert object to json for elasticsearch bulk 
upload\n # Bulk load JSON format is defined here:\n # https://www.elastic.co/guide/en/elasticsearch/reference/6.2/docs-bulk.html\n bulk_actions = []\n cnt = 0\n\n # create new index with mapping\n self.elasticsearch_client.indices.create(index=self.elasticsearch_new_index, body=self.elasticsearch_mapping)\n\n for action in actions:\n index_row = dict(index=dict(_index=self.elasticsearch_new_index))\n action['resource_type'] = self.elasticsearch_type\n\n bulk_actions.append(index_row)\n bulk_actions.append(action)\n cnt += 1\n if cnt == self.elasticsearch_batch_size:\n self.elasticsearch_client.bulk(bulk_actions)\n LOGGER.info('Publish %i of records to ES', cnt)\n cnt = 0\n bulk_actions = []\n\n # Do the final bulk actions\n if bulk_actions:\n self.elasticsearch_client.bulk(bulk_actions)\n\n # fetch indices that have {elasticsearch_alias} as alias\n elasticsearch_old_indices = self._fetch_old_index()\n\n # update alias to point to the new index\n actions = [{\"add\": {\"index\": self.elasticsearch_new_index, \"alias\": self.elasticsearch_alias}}]\n\n # delete old indices\n delete_actions = [{\"remove_index\": {\"index\": index}} for index in elasticsearch_old_indices]\n actions.extend(delete_actions)\n\n update_action = {\"actions\": actions}\n\n # perform alias update and index delete in single atomic operation\n self.elasticsearch_client.indices.update_aliases(update_action)", "def connection_es(): \n client = Elasticsearch([{'host': 'localhost', 'port': 9200}])\n index_name = \"fs_metadata_\"\n return client, index_name", "def create_index():\n try:\n client = MongoClient(MONGO_URI,event_listeners=[CommandLogger()])\n db = client.get_database('UNSD')\n \n coll_unfcc = db.get_collection('unfcc')\n coll_ebal = db.get_collection('ebal')\n result_unfcc = coll_unfcc.create_index([('REF_AREA',ASCENDING),('TIME_PERIOD',DESCENDING)])\n result_ebal = coll_ebal.create_index([('REF_AREA',ASCENDING),('TIME_PERIOD',DESCENDING)])\n except pymongo.errors.ConnectionFailure as e:\n logger.error('PyMongo error ConnectionFailure seen: ' + str(e))\n traceback.print_exc(file = sys.stdout)", "def indices(self) -> list:\n endpoint = \"/api/indices/\"\n ret = self._request(endpoint=endpoint)\n return ret", "def addCatalogIndexes(portal):\n catalog = getToolByName(portal, 'portal_catalog')\n indexes = catalog.indexes()\n wanted = (('standardTags', 'KeywordIndex'),\n ('iamTags', 'KeywordIndex'),\n ('isearchTags', 'KeywordIndex'),\n ('hiddenTags', 'KeywordIndex'))\n indexables = []\n for name, meta_type in wanted:\n if name not in indexes:\n catalog.addIndex(name, meta_type)\n indexables.append(name)\n logger.info(\"Added %s for field %s.\", meta_type, name)\n if len(indexables) > 0:\n logger.info(\"Indexing new indexes %s.\", ', '.join(indexables))\n catalog.manage_reindexIndex(ids=indexables)", "def _create_indexes(cls, index_type: IndexType, document: dict, condition=None):\n try:\n criteria = [\n (field_name, pymongo.ASCENDING)\n for field_name in cls._get_index_fields(index_type, document, \"\")\n ]\n if criteria:\n # Avoid using auto generated index name that might be too long\n index_name = (\n f\"uidx{cls.__collection_name__}\"\n if index_type == IndexType.Unique\n else f\"idx{cls.__collection_name__}\"\n )\n cls.logger.info(\n f\"Create {index_name} {index_type.name} index on {cls.__collection_name__} using {criteria} criteria.\"\n )\n if condition is None or cls._server_version < \"3.2\":\n cls.__collection__.create_index(\n criteria, unique=index_type == IndexType.Unique, name=index_name\n )\n else:\n 
try:\n cls.__collection__.create_index(\n criteria,\n unique=index_type == IndexType.Unique,\n name=index_name,\n partialFilterExpression=condition,\n )\n except pymongo.errors.OperationFailure:\n cls.logger.exception(\n f\"Unable to create a {index_type.name} index.\"\n )\n cls.__collection__.create_index(\n criteria,\n unique=index_type == IndexType.Unique,\n name=index_name,\n )\n except pymongo.errors.DuplicateKeyError:\n cls.logger.exception(\n f\"Duplicate key found for {criteria} criteria \"\n f\"when creating a {index_type.name} index.\"\n )\n raise", "def store_index(self, index, doc_type, source_list, init_id):\n\n bulk_actions = []\n doc_id = init_id\n\n for source in source_list:\n data_body = ElasticSearchUtility.__index_data_body(index, doc_type, doc_id, source[\"_source\"])\n bulk_actions.append(data_body)\n doc_id += 1\n\n print 'inserting - ', len(bulk_actions)\n helpers.bulk(self.es, bulk_actions)", "def pypi_indexes() -> IndexesDefinition:\n return {}", "def create_indexes_with_stats(self) -> float:\n query_nodes_per_cluster = self.cluster_spec.servers_by_cluster_and_role('n1ql')\n index_nodes_per_cluster = self.cluster_spec.servers_by_cluster_and_role('index')\n\n t0 = time.time()\n for cluster_query_nodes in query_nodes_per_cluster:\n self.create_indexes(query_node=cluster_query_nodes[0])\n\n # Wait for index build to complete on first cluster, and record time\n logger.info('Waiting for index build on primary cluster')\n self.wait_for_indexing(index_nodes=index_nodes_per_cluster[0])\n index_build_time = time.time() - t0\n logger.info(\"Index build completed in {} sec\".format(index_build_time))\n\n # Wait for index build to complete on remaining clusters\n logger.info('Waiting for index build to complete on remaining clusters')\n remaining_index_nodes = [node for nodes in index_nodes_per_cluster[1:] for node in nodes]\n self.wait_for_indexing(index_nodes=remaining_index_nodes)\n\n return index_build_time", "def test_recreate_index_that_exists(self):\n indices = self.elasticsearch_cls().indices\n indices.exists.return_value = True\n\n index_name = 'abcd'\n self.client._recreate_index(index_name)\n indices.delete.assert_called_once_with(index_name)\n indices.create.assert_called_once_with(index_name)", "def createIndex(self):\n\n super(COCO_PLUS, self).createIndex()\n catNameToId = dict()\n pointclouds = dict()\n imgToPc = dict()\n\n if 'pointclouds' in self.dataset:\n for pc in self.dataset['pointclouds']:\n imgToPc[pc['img_id']] = pc\n pointclouds[pc['id']] = pc\n\n if 'categories' in self.dataset:\n for cat in self.dataset['categories']:\n catNameToId[cat['name']] = cat['id']\n\n self.catNameToId = catNameToId\n self.pointclouds = pointclouds\n self.imgToPc = imgToPc\n self.logger.info('index created.')", "def create(\n self,\n index: _models.SearchIndex,\n request_options: Optional[_models.RequestOptions] = None,\n *,\n content_type: str = \"application/json\",\n **kwargs: Any\n ) -> _models.SearchIndex:", "def build_index(self):\n\t\tix = self.create_index()\n\t\twriter = AsyncWriter(ix)\n\n\t\tfor i, document in enumerate(self.documents):\n\t\t\tif document:\n\t\t\t\twriter.add_document(**document)\n\t\t\tupdate_progress_bar(\"Building Index\", i, len(self.documents))\n\n\t\twriter.commit(optimize=True)", "def create(client, name, index):\n r = client.indices.put_alias(index=index, name=name)\n LOG.info(json.dumps(r))\n ok = r.get(\"acknowledged\")\n if not ok:\n sys.exit(UNKNOWN_ERROR)", "def ensure_indexes(self):\n self.db_connection.ensure_indexes()", "def 
build_index(self):\n self.create_index()\n logger.debug(f\"Building index with {self.n_trees} trees.\")\n\n for i in range(len(self.corpus_embeddings)):\n self.index.add_item(i, self.corpus_embeddings[i])\n self.index.build(self.n_trees)", "def build(self):\n\t\tself.documents = self.get_items_to_index()\n\t\tself.build_index()", "def init_index(self):\n raise NotImplementedError", "def create_location_index():\n get_rsvps_database().groups.create_index([(\"location\", GEOSPHERE)])", "def _assert_indices_exist(self, catalog: CatalogName):\n es_client = ESClientFactory.get()\n service = IndexService()\n for index_name in service.index_names(catalog):\n self.assertTrue(es_client.indices.exists(index_name))", "def create_index(args, client):\n policy = {}\n client.index_geo2dsphere_create(args.nspace, args.set,\n LOCBIN, LOCNDX, policy)\n client.index_integer_create(args.nspace, args.set,\n HSHBIN, HSHNDX, policy)", "def init_index(clear=False):\n return _run_indexer_func(\"init_index\", clear)", "def indices_client():\n es_connected = False\n while not es_connected:\n try:\n ES = Elasticsearch(\n hosts=[HOSTNAME]\n )\n es_connected = True\n except TransportError as e:\n logging.info('Not yet connected: %s, sleeping for 1s', e)\n time.sleep(1)\n return IndicesClient(ES)", "def bulk_push_to_elastic(elastic_search_url, index, docs):\n CREATE_TEMPLATE = {\"create\": {\"_index\": index, \"_type\": \"_doc\", \"_id\": \"\"}}\n\n bulk_request_body = \"\"\n for doc in docs:\n CREATE_TEMPLATE[\"create\"][\"_id\"] = doc[HASH_ID]\n bulk_request_body += json.dumps(CREATE_TEMPLATE) + NEW_LINE\n bulk_request_body += json.dumps(doc) + NEW_LINE\n\n # Request\n headers = {\"content-type\": \"application/x-ndjson\"}\n\n url = elastic_search_url + \"/\" + \"_bulk\"\n\n response = requests.post(url, data=bulk_request_body, headers=headers)\n return response", "def fix__elasticsearch_index_allocation(cluster: str, index: str):\n\n elastic = sreElastic(host=cluster)\n elastic.index_fix(index=index)", "def create_index(self, db_name):\n\t\tindex_func_path = self._get_index_func_filepath(db_name)\n\t\t\n\t\tif os.path.isfile(index_func_path):\n\t\t\t# create index request payload from predefined file\t\n\t\t\twith open(index_func_path, 'r') as content_file:\n\t\t\t\tpayload = content_file.read()\n\t\t\n\t\t\tprint (\"Create index using function in: {}\".format(index_func_path))\n\t\t\turl = \"https://{}/{}/_design/view\".format(\n\t\t\t\tself.cloudanthost, db_name)\n\t\t\tresponse = self.r.put(url, data=payload)\n\t\t\tassert response.status_code == 201", "def rebuild_index():\n print('Building indexes...')\n print(data_fldr)\n ndx = []\n for root, _, files in os.walk(data_fldr):\n for f in files:\n if f[-3:].upper() in ['CSV','TXT']:\n ndx.extend(get_index_terms(root + os.sep + f))\n with open(ndx_file, 'w') as fio:\n for i in ndx:\n fio.write(i + '\\n')", "def indexCreationStarted(self, *args, **kwargs): # real signature unknown\n pass", "def indexCreated(self, *args, **kwargs): # real signature unknown\n pass", "def set_indices(self, part_instance_counts):\n type_indices = {}\n for entry in self._entries:\n try:\n entry.set_indices(\n model_type_index=type_indices.setdefault(entry.ENTRY_SUBTYPE, 0),\n instance_count=part_instance_counts.get(entry.name, 0),\n )\n except KeyError as e:\n raise SoulstructError(\n f\"Invalid map component name for {entry.ENTRY_SUBTYPE.name} model {entry.name}: {e}\"\n )\n else:\n type_indices[entry.ENTRY_SUBTYPE] += 1", "def create_new_index(self, index_name, value, is_cluster, 
check=False):\n print(f\"Creating {index_name} index started \\n\")\n add_index = \"/html//i[@id='addIndex']\"\n self.locator_finder_by_xpath(add_index).click()\n time.sleep(2)\n\n print(f\"selecting {index_name} from the list\\n\")\n self.locator_finder_by_select(self.select_index_type_id, value)\n\n if index_name == \"Persistent\":\n self.select_persistent_fields_id = self.locator_finder_by_hover_item_id(self.select_persistent_fields_id)\n time.sleep(1)\n self.select_persistent_fields_id.send_keys(\"pfields\").perform()\n self.select_persistent_name_id = self.locator_finder_by_hover_item_id(self.select_persistent_name_id)\n self.select_persistent_fields_id.send_keys(\"Persistent\").perform()\n time.sleep(1)\n\n if not is_cluster:\n self.select_persistent_unique_id = self.locator_finder_by_hover_item_id(\n self.select_persistent_unique_id\n )\n\n self.select_persistent_sparse_id = self.locator_finder_by_hover_item_id(self.select_persistent_sparse_id)\n self.select_persistent_duplicate_id = self.locator_finder_by_hover_item_id(\n self.select_persistent_duplicate_id\n )\n self.select_persistent_background_id = self.locator_finder_by_hover_item_id(self.select_persistent_background_id)\n time.sleep(1)\n\n elif index_name == \"Geo\":\n self.select_geo_fields_id = self.locator_finder_by_hover_item_id(self.select_geo_fields_id)\n self.select_geo_fields_id.send_keys(\"gfields\").perform()\n time.sleep(1)\n self.select_geo_name_id = self.locator_finder_by_hover_item_id(self.select_geo_name_id)\n self.select_geo_name_id.send_keys(\"Geo\").perform()\n time.sleep(1)\n self.select_geo_json_id = self.locator_finder_by_hover_item_id(self.select_geo_json_id)\n self.select_geo_background_id = self.locator_finder_by_hover_item_id(self.select_geo_background_id)\n time.sleep(1)\n self.wait_for_ajax()\n\n elif index_name == \"Fulltext\":\n self.select_fulltext_field_id = self.locator_finder_by_hover_item_id(self.select_fulltext_field_id)\n self.select_fulltext_field_id.send_keys(\"ffields\").perform()\n time.sleep(1)\n self.select_fulltext_name_id = self.locator_finder_by_hover_item_id(self.select_fulltext_name_id)\n self.select_fulltext_name_id.send_keys(\"Fulltext\").perform()\n time.sleep(1)\n self.select_fulltext_length_id = self.locator_finder_by_hover_item_id(self.select_fulltext_length_id)\n self.select_fulltext_length_id.send_keys(100)\n self.select_fulltext_background_id = self.locator_finder_by_hover_item_id(\n self.select_fulltext_background_id\n )\n time.sleep(1)\n self.wait_for_ajax()\n\n elif index_name == \"TTL\":\n self.select_ttl_field_id = self.locator_finder_by_hover_item_id(self.select_ttl_field_id)\n self.select_ttl_field_id.send_keys(\"tfields\").perform()\n time.sleep(1)\n self.select_ttl_name_id = self.locator_finder_by_hover_item_id(self.select_ttl_name_id)\n self.select_ttl_name_id.send_keys(\"TTL\").perform()\n time.sleep(1)\n self.select_ttl_expiry_id = self.locator_finder_by_hover_item_id(self.select_ttl_expiry_id)\n self.select_ttl_expiry_id.send_keys(1000)\n self.select_ttl_background_id = self.locator_finder_by_hover_item_id(self.select_ttl_background_id)\n time.sleep(1)\n self.wait_for_ajax()\n\n # experimental feature\n elif index_name == 'ZKD':\n if check:\n self.navbar_goto(\"collections\")\n print(\"Selecting computed values collections. 
\\n\")\n col = '//*[@id=\"collection_ComputedValueCol\"]/div/h5'\n self.locator_finder_by_xpath(col).click()\n self.select_index_menu()\n\n print(f\"Creating {index_name} index started \\n\")\n self.locator_finder_by_xpath(add_index).click()\n time.sleep(2)\n\n print(f\"selecting {index_name} from the list\\n\")\n self.locator_finder_by_select(self.select_index_type_id, 5)\n\n time.sleep(1)\n\n select_zkd_field_sitem = self.locator_finder_by_id('newZkdFields')\n select_zkd_field_sitem.click()\n select_zkd_field_sitem.clear()\n select_zkd_field_sitem.send_keys('x,y')\n time.sleep(1)\n else:\n select_zkd_field_sitem = self.locator_finder_by_id('newZkdFields')\n select_zkd_field_sitem.click()\n select_zkd_field_sitem.clear()\n select_zkd_field_sitem.send_keys('zkdfileds')\n time.sleep(1)\n\n select_zkd_name_sitem = self.locator_finder_by_id('newZkdName')\n select_zkd_name_sitem.click()\n select_zkd_name_sitem.clear()\n select_zkd_name_sitem.send_keys('ZKD')\n time.sleep(1)\n\n select_create_index_btn_id = \"createIndex\"\n self.locator_finder_by_id(select_create_index_btn_id).click()\n time.sleep(10)\n self.webdriver.refresh()\n\n if check:\n self.navbar_goto(\"collections\")\n self.select_collection(\"TestDoc\")\n self.select_index_menu()\n\n print(f\"Creating {index_name} index completed \\n\")", "def create_module_index(self):\n if self.path and self.rootnode and self.rootnode.model:\n self.index_module = create_index(\n self.rootnode.model.ref, self.path)\n\n if not self.index_module:\n ok_error('Sorry, no index found for ' + self.name)\n\n else:\n ok('Index is ready for {} ({} entries)'.format(\n self.name, len(self.index_module)))", "def build_index(self):\r\n date_time('Building indexes in citations table')\r\n self.cursor.execute('DROP INDEX IF EXISTS IDX_citations ;')\r\n self.cursor.execute('CREATE INDEX IDX_citations ON citations (citation);')\r\n self.conn.commit()\r\n gc.collect()", "def create_index(collection, index):\n db[collection].create_index(index)", "def es_index(project=None):\n if project is not None:\n project = Project.by_slug(project)\n if project is None:\n raise ValueError(\"Project not found.\")\n script_indexer.index_project(project=project)", "def create_index(self, *columns):\n self._assert_columns_exist(columns)\n\n # Build index name.\n whitelist = lambda col: ''.join(x for x in col if x.isalnum())\n idx_name = '_'.join(whitelist(col) for col in columns)\n idx_name = 'idx_{0}_{1}'.format(self._table, idx_name)\n\n # Build column names.\n col_names = [self._normalize_column(x) for x in columns]\n col_names = ', '.join(col_names)\n\n # Prepare statement.\n statement = 'CREATE INDEX IF NOT EXISTS {0} ON {1} ({2})'\n statement = statement.format(idx_name, self._table, col_names)\n\n # Create index.\n cursor = self._connection.cursor()\n cursor.execute(statement)", "def build_index():\n for site in get_sites():\n text = read_site(site)\n while text == False:\n text = read_site(site) # keep attempting to read until successful\n index_site(site, text)", "def build_index():\n for site in get_sites():\n text = read_site(site)\n while text == False:\n text = read_site(site) # keep attempting to read until successful\n index_site(site, text)", "def post(body):\n es = Elasticsearch([ELASTIC_SEARCH_HOST], http_auth=ELASTIC_SEARCH_AUTH, scheme=\"https\", port=ELASTIC_SEARCH_PORT)\n\n # Create Index If not present on host\n if not es.indices.exists('newdata'):\n es.indices.create('newdata')\n\n # Create Document in index\n entry = es.index(index='newdata', 
doc_type=body[\"data\"][\"type\"], body=body[\"data\"][\"attributes\"])\n response = dict(data=dict(id=entry[\"_id\"], type=entry[\"_type\"], attributes=dict(index=entry[\"_index\"])))\n return response, 201", "def set_in_index(document, index, type):\n response = None\n\n #Try 3 times to store the document in ES, each time picking a random ES node address in case of failure\n for retries in range(3):\n try:\n log('ES Set Request :: ' + json.dumps(document) + ' : ' + index + ':' + type)\n response = es.index(index=index, doc_type=type, id=document['id'], body=document)\n log(\"ES Set Response :: \" + json.dumps(response))\n except ImproperlyConfigured:\n log(\"ES ImproperlyConfigured!\" + traceback.format_exc())\n continue\n except ElasticsearchException:\n log(\"ES ElasticsearchException!\" + traceback.format_exc())\n continue\n except TransportError:\n log(\"ES TransportError!\" + traceback.format_exc())\n continue\n except NotFoundError:\n log(\"ES NotFoundError!\" + traceback.format_exc())\n continue\n except ConflictError:\n log(\"ES ConflictError!\" + traceback.format_exc())\n continue\n except RequestError:\n log(\"ES RequestError!\" + traceback.format_exc())\n continue\n except SerializationError:\n log(\"ES SerializationError!\" + traceback.format_exc())\n continue\n except ConnectionError:\n log(\"ES ConnectionError!\" + traceback.format_exc())\n continue\n except Exception:\n log(\"ES Exception!\" + traceback.format_exc())\n continue\n finally:\n log(\"Total number of ES write attempts: \" + str(retries + 1))\n #Exit for loop if ES transaction is successful otherwise pick another node and continue retrying\n break\n\n if response is None or response == '':\n return 'false'\n else:\n return 'true'", "def create_index(index_name, index_config, client):\n client.create(index=index_name, body=index_config)", "def _Dynamic_CreateIndex(self, index, id_response, request_id=None):\n if index.id() != 0:\n raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,\n 'New index id must be 0.')\n self._RemoteSend(index, id_response, \"CreateIndex\", request_id)\n return id_response", "def add_catalog_indexes(context, logger):\n if logger is None:\n logger = logging.getLogger('bungenicms.membershipdirectory')\n \n # Run the catalog.xml step as that may have defined new metadata columns. \n # We could instead add <depends name=\"catalog\"/> to the registration of our \n # import step in zcml, but doing it in code makes this method usable as \n # upgrade step as well. 
Note that this silently does nothing when there is \n # no catalog.xml, so it is quite safe.\n setup = getToolByName(context, 'portal_setup')\n setup.runImportStepFromProfile(PROFILE_ID, 'catalog')\n \n catalog = getToolByName(context, 'portal_catalog')\n indexes = catalog.indexes()\n \n # Specify the indexes you want, with ('index_name', 'index_type')\n wanted = (('county', 'FieldIndex'),\n ('constituency', 'FieldIndex'),\n ('priority_number', 'FieldIndex'), \n ('political_party', 'FieldIndex'),\n ('elected_nominated', 'FieldIndex'),\n ('member_status', 'FieldIndex'),\n ('special_interest', 'FieldIndex'),\n ('other_names', 'FieldIndex'),\n ('member_role', 'FieldIndex'),\n ('member_title', 'FieldIndex'),\n ('body_text', 'FieldIndex'),\n ('member_full_names', 'ZCTextIndex'),\n )\n\n indexables = []\n for (name, meta_type) in wanted:\n if meta_type and name not in indexes:\n if meta_type == 'ZCTextIndex':\n item_extras = Empty()\n item_extras.doc_attr = name\n item_extras.index_type = 'Okapi BM25 Rank'\n item_extras.lexicon_id = 'plone_lexicon'\n catalog.addIndex(name, meta_type, item_extras)\n else:\n catalog.addIndex(name, meta_type)\n \n indexables.append(name)\n logger.info('Added %s for field %s.', meta_type, name)\n if len(indexables) > 0:\n logger.info('Indexing new indexes %s.', ', '.join(indexables))\n catalog.manage_reindexIndex(ids=indexables)", "def test_creating_index_type(self):", "def __createIndexFile(self, dimensions):\n target = os.path.join(self.workingDir, self.get( 'index_filename'))\n self.info(\"Creating index file {}\".format(target))\n text = \"\"\n for i in range(0,dimensions):\n text+=\"1 \"\n\n util.createScript(target, text)\n return target", "def create_index(schema, index_name):\n if not os.path.exists(index_name):\n os.mkdir(index_name)\n ix = index.create_in(index_name, schema)\n print(f\"index {index_name} created successfully\")\n return ix", "def _get_ea_index():\n ea_index_temp = {'Address': 5, 'Agency': 10, 'City': 4, 'Country': 3,\n 'Datacenter': 7, 'Division': 8, 'Interface Name': 13,\n 'Region_List': 2, 'Requester Email': 9, 'Site': 6,\n 'VLAN Description': 11, 'IPR Designation': 16}\n return ea_index_temp", "def build_index(self):\n # Init the HNSWLIB index\n self.create_index()\n logger.info(f\"Building HNSWLIB index, max_elements: {len(self.corpus)}\")\n logger.debug(f\"Parameters Required: M: {self.M}\")\n logger.debug(f\"Parameters Required: ef_construction: {self.ef_construction}\")\n logger.debug(f\"Parameters Required: ef(>topn): {self.ef}\")\n\n # Then we train the index to find a suitable clustering\n self.index.add_items(self.corpus_embeddings, list(range(len(self.corpus_embeddings))))", "def clean_es(es):\n logger.info('Cleaning the database..')\n # Ignore if the index doesn't exist, as it'll be created by next queries\n es.indices.delete(\n index=EPMC_METADATA_INDEX,\n ignore=[404]\n )\n es.indices.create(\n index=EPMC_METADATA_INDEX\n )", "def ensure_indexes(self, cls):\n return self.impl.ensure_indexes(cls)", "def create_index(self, table_name, index, timeout):\n _abstract()", "def create_index(self, table_name, index, timeout):\n _abstract()", "def index_siteroot(context):\n portal = getSite()\n portal.reindexObject()", "def set_index(self, idx, rel, attrs):\n\n query = 'CREATE INDEX {} ON {} ({})'.format(idx, rel, ','.join(attrs))\n\n with self.tpch_cxn.cursor() as curs:\n try:\n curs.execute(query)\n except pg.ProgrammingError as e:\n print(e)", "def _init_es(self):\n es = Elasticsearch([{'host': elastic_conf['host'], 'port': 
elastic_conf['port']}])\n print('Connected to Elastic Search:', es.ping())\n return es", "def index(self):\n\n if self.cluster:\n self.cluster.index()\n else:\n super().index()", "def create_index(self, *columns):\n self._assert_columns_exist(columns)\n\n # Build index name.\n whitelist = lambda col: ''.join(x for x in col if x.isalnum())\n idx_name = '_'.join(whitelist(col) for col in columns)\n idx_name = 'idx_{0}_{1}'.format(self._table, idx_name)\n\n # Build column names.\n col_names = [self._normalize_column(x) for x in columns]\n col_names = ', '.join(col_names)\n\n # Prepare statement.\n statement = 'CREATE INDEX IF NOT EXISTS {0} ON {1} ({2})'\n statement = statement.format(idx_name, self._table, col_names)\n\n # Create index.\n cursor = self._connection.cursor()\n cursor.execute('PRAGMA synchronous=OFF')\n cursor.execute(statement)" ]
[ "0.7837676", "0.73570573", "0.735629", "0.7337516", "0.7260696", "0.72410965", "0.7223582", "0.7056203", "0.67365074", "0.6722053", "0.6716243", "0.6706365", "0.666678", "0.66666", "0.6647666", "0.6642432", "0.66221064", "0.6614326", "0.6584603", "0.656978", "0.649845", "0.6468377", "0.646647", "0.6464628", "0.639096", "0.63604903", "0.6338675", "0.63063765", "0.62832683", "0.6263301", "0.6216811", "0.6156742", "0.6150681", "0.6131796", "0.61288166", "0.61212397", "0.6091755", "0.6067716", "0.606023", "0.60591227", "0.6056292", "0.60371727", "0.6012781", "0.60125005", "0.60010016", "0.6000973", "0.5970081", "0.596808", "0.5929093", "0.5919633", "0.59118533", "0.58906424", "0.5883115", "0.58609045", "0.5858354", "0.5844356", "0.5827956", "0.58267087", "0.58261603", "0.58127284", "0.58110857", "0.5795352", "0.5794144", "0.5780957", "0.5759483", "0.57594717", "0.5751734", "0.57475334", "0.57320386", "0.5724135", "0.5705798", "0.5704756", "0.5704449", "0.5698739", "0.56956476", "0.5677753", "0.5674709", "0.567185", "0.56704074", "0.56645674", "0.56645674", "0.56612456", "0.5661077", "0.5651239", "0.5647283", "0.5644418", "0.5636467", "0.56353134", "0.56309986", "0.5629637", "0.56294703", "0.5620005", "0.5619832", "0.56045604", "0.56045604", "0.56029737", "0.5585016", "0.5574723", "0.5572941", "0.5571144" ]
0.8065617
0
Bulk-load each JSON data file into Elasticsearch.
def bulk(self) -> None:
    helpers.bulk(self.client, self.gen_business_data(BUSINESS_FP))
    helpers.bulk(self.client, self.gen_review_data(REVIEW_FP))
    helpers.bulk(self.client, self.gen_tip_data(TIP_FP))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def post_bulk(bulk_json):\n\n nbtry=0\n success=False\n\n # Bulk insert\n ####################################################################\n cnx.request(\"POST\",config.index+\"/\"+config.typ+\"/_bulk\",bulk_json) #\n ####################################################################\n\n # Get and read response from Elastic Search server\n resp=cnx.getresponse()\n resp_msg_json= resp.read()\n #sys.stderr.write( resp_msg_json + \"\\n\")\n resp_msg=json.loads(resp_msg_json)\n # Check status: both HTTP and within the Elastic Search answer\n if resp.status != httplib.OK or resp_msg[\"errors\"] is True:\n sys.stderr.write( bulk_json)\n sys.stderr.write( resp_msg_json +\"\\n\")\n raise Exception(\"ERROR when bulk loading into %s/%s: %d %s\\n\" % (config.index,config.typ, resp.status, resp.reason))", "def bulk_index(data):\n\n def bulk_api_string(item):\n return f\"{{\\\"index\\\":{{}}\\n{json.dumps(item)}\"\n\n body = '\\n'.join([bulk_api_string(item) for item in data]) + '\\n'\n\n return make_request(\n requests.post,\n url=f\"{connection.hostname}:{connection.port}/{connection.index}/_bulk\",\n headers={'Content-Type': 'application/json'},\n auth=auth,\n data=body\n )", "def bulk_push_to_elastic(elastic_search_url, index, docs):\n CREATE_TEMPLATE = {\"create\": {\"_index\": index, \"_type\": \"_doc\", \"_id\": \"\"}}\n\n bulk_request_body = \"\"\n for doc in docs:\n CREATE_TEMPLATE[\"create\"][\"_id\"] = doc[HASH_ID]\n bulk_request_body += json.dumps(CREATE_TEMPLATE) + NEW_LINE\n bulk_request_body += json.dumps(doc) + NEW_LINE\n\n # Request\n headers = {\"content-type\": \"application/x-ndjson\"}\n\n url = elastic_search_url + \"/\" + \"_bulk\"\n\n response = requests.post(url, data=bulk_request_body, headers=headers)\n return response", "def bulk_process(self):\n\n def actions():\n try:\n task = self.queue.get(block=False, timeout=None)\n\n if task['action'] == 'index':\n yield {\n '_op_type': 'index',\n '_index': self.ensure_index(task),\n '_id': task['id'],\n 'doc': task['properties']\n }\n elif task['action'] == 'delete':\n yield {\n '_op_type': 'delete',\n '_index': self.ensure_index(task),\n '_id': task['id'],\n 'doc': task['properties']\n }\n else:\n raise NotImplementedError\n\n except Empty:\n pass\n\n for success, info in streaming_bulk(self.es_client, actions()):\n if success:\n self.queue.task_done()", "def populate(self):\n\n self.create_index()\n self.check_type()\n self.create_mapping()\n\n f = open(self.csv_file, 'rU')\n\n # Read the first line for all the headers\n headers = f.readline().split(',')\n\n # Read the rest of the document\n rows = f.readlines()\n added_counter = 0\n\n actions = []\n for row in rows:\n fields = row.split(',')\n obj = {}\n for header in headers:\n # we call lower-case here because we were originally using\n # analyzed strings in elasticsearch (and they were\n # automatically converted). 
Code was built based on that so it's\n # easiest to convert for now\n try:\n obj[header.replace('\\n', '')] = float(fields[\n headers.index(header)].replace('\\n', '').lower())\n except ValueError:\n obj[header.replace('\\n', '')] = fields[\n headers.index(header)].replace('\\n', '').lower()\n # check afterwards to replace empty strings with None (which json.dumps hopefully writes to null)\n if obj[header.replace('\\n', '')] == '':\n obj[header.replace('\\n', '')] = None\n try:\n item = {\n '_index': self.es_main_index,\n '_type': self.es_main_type,\n '_source': obj\n }\n\n actions.append(item)\n\n added_counter += 1\n print('%s new records added' % added_counter,\n end='\\r')\n sys.stdout.flush()\n\n if added_counter % self.chunk_size == 0:\n helpers.bulk(self.es, actions)\n actions = []\n\n except ConnectionError:\n print('There was a connection error. Check your Elastic' +\n ' Search setting and make sure Elastic Search is ' +\n 'running.')\n return False\n\n # add the remaining items\n if actions:\n helpers.bulk(self.es, actions)\n\n print('The update is completed. %s new records were added.' %\n added_counter)", "def _load_elastic(self, sqldata):\n inserts = []\n for r in sqldata:\n body = self._preprocess(dict(r.items()))\n if not body:\n continue # Skip if preprocessing returns False\n index_name = self._get_index_name(body['TIME_STAMP'])\n document = {\n \"_index\" : index_name,\n \"_type\" : 'default', # Hardcoded - we only have 1 doctype\n \"_id\" : body[self.seq_field],\n \"_source\" : body\n }\n inserts.append(document)\n\n # update sequence to last item in the results\n self.seq = sqldata[-1][self.seq_field]\n \n # Insert list of documents into elasticsearch\n status = helpers.bulk(self.es, inserts, self.chunk_size)\n self.logger.info(\"Inserted %d chunks into %s\" % (self.chunk_size,\n index_name))\n return status", "def step070() -> None:\n logger.logMessage('Begin: elasticsearch bulk update')\n client = es.Elasticsearch(hostlist)\n\n def generate():\n with open(renumFile,'r') as f:\n line = f.readline().rstrip()\n while line != '':\n fields = line.split(';')\n oper = { '_index': fields[3], \n '_op_type': 'update',\n '_id': fields[2].rstrip(),\n '_type': 'doc',\n '_source:': {'doc': {'tsa': fields[0]}}}\n \n yield oper\n line = f.readline().rstrip()\n result = eshelp.bulk(client,generate())\n logger.logMessage('Bulk result: {0}'.format(result))\n logger.logMessage('End : elasticsearch bulk update')", "def test_bulk(es_bulk, mock_es_client):\n actions = []\n chunk_size = 10\n elasticsearch.bulk(actions=actions, chunk_size=chunk_size)\n\n es_bulk.assert_called_with(\n mock_es_client.return_value,\n actions=actions,\n chunk_size=chunk_size,\n max_chunk_bytes=settings.ES_BULK_MAX_CHUNK_BYTES,\n )", "def elastic_data_sync(from_ts, to_ts, conn_obj, idx, type):\n if from_ts:\n query = {\"_id\": {\"$gt\": from_ts, \"$lte\": to_ts}}\n else:\n query = {\"_id\": {\"$lte\": to_ts}}\n pkg_meta = conn_obj.find(query)\n #Call elasticsearch bulk insert with mongo cursor\n data = {\"data_iter\": pkg_meta, \"index\": idx, \"_type\": type,\n \"mapping\": ELASTIC_MAPPINGS.get(idx, {})}\n es_sync = ElasticSearch()\n status, res = es_sync.bulk(**data)\n return status, res", "def do_bulk(self, args):\n pass", "def index_bulk_from_files(self, files):\r\n\r\n docs = self._mailextractor.extract_jsons(files) # Generator-Iterable\r\n actions = self.convert_docstrs_to_bulk_actions(docs) # Generator-Iterable\r\n\r\n self._cur_print = 0\r\n actions_for_chunk = self.print_chunk_progress(actions) # 
Generator-Iterable\r\n (cnt_success, errors_index) = es_helpers.bulk(\r\n self._es, actions_for_chunk, chunk_size=constants.ES_BULK_CHUNK_SIZE)\r\n\r\n cnt_total = self._mailextractor.cnt_total\r\n errors_convert = self._mailextractor.errors_convert\r\n cnt_error = len(errors_convert) + len(errors_index)\r\n return Summary(cnt_total=cnt_total, cnt_success=cnt_success, cnt_error=cnt_error,\r\n errors_convert=errors_convert, errors_index=errors_index)", "def handle(self, *args, **options):\n self.create_indices()\n self.bulk()", "def process_es_bulk(pub_list, es):\n bulk_response = es.bulk(\n body=''.join(pub_list),\n refresh='wait_for',\n request_timeout=3600,\n )\n if bulk_response.get('errors'):\n logger.error('failed on bulk indexing:\\n%s',\n bulk_response)\n raise IndexingErrorException()\n return len(pub_list)", "def _load_elastic(self, sqldata):\n from collections import defaultdict\n attributes = ResourceMetricsLoader.attr_fields.keys()\n records = defaultdict(lambda: defaultdict(int))\n for sd in sqldata:\n r = dict(sd.items())\n if r['ATTRIBUTE_NAME'] not in attributes:\n continue\n # Only store hostnames and not FQDN for resources\n r['RESOURCE_NAME'] = r['RESOURCE_NAME'].split('.')[0]\n\n (attr, val) = self._get_attr_val(r)\n records[r.get('RESOURCE_NAME'),r.get('TIME_STAMP')][attr] = val\n records[r.get('RESOURCE_NAME'),r.get('TIME_STAMP')]['INSERT_SEQ'] = r['INSERT_SEQ']\n\n # Construct docs from records\n inserts = [] \n for k, v in records.iteritems():\n body = { attr: val for attr, val in v.iteritems() } \n body['RESOURCE_NAME'], body['TIME_STAMP'] = k\n document = {\n \"_index\" : self._get_index_name(body['TIME_STAMP']),\n \"_type\" : 'default',\n \"_source\" : body\n }\n inserts.append(document)\n \n # Insert list of documents into elasticsearch\n self.logger.info(\"Loading chunk into elasticsearch\")\n status = helpers.bulk(self.es,\n inserts,\n self.chunk_size)\n self.logger.info(\"Finished loading chunk into elasticsearch\")\n\n # update sequence to last item in the results\n #self.seq = dict(results[-1].items())[self.id_field]\n self.seq = sqldata[-1][self.seq_field]\n \n return status", "def loader(index, col):\n writer = index.writer()\n feed_data = get_data(col)\n for doc in feed_data:\n idx = doc[\"_id\"]\n data = doc\n # data = json.dumps(doc)\n # print(data)\n body = dict_values_to_text(doc)\n writer.add_document(idx=idx,data=data,body=body)\n writer.commit()\n print(f\"{index} loaded successfully\")", "def upload_entities(self, batch):\n # TODO Include a Do Not Overwrite call\n results = None\n atlas_endpoint = self.endpoint_url + \"/entity/bulk\"\n\n payload = AtlasClient._prepare_entity_upload(batch)\n\n postBulkEntities = requests.post(\n atlas_endpoint,\n json=payload,\n headers=self.authentication.get_authentication_headers()\n )\n\n results = self._handle_response(postBulkEntities)\n\n return results", "def ingest_all(self, docs):\n for doc in docs:\n self.ingest(doc)", "def save(self, data):\n activities = [json.loads(activity['Json']) for activity in data]\n\n for i in range(len(activities)):\n activities[i]['created_at'] = to_datetime(activities[i]['created_at'])\n\n with Elastic(index='wink', doc_type='activity') as elastic:\n elastic.upload(activities, 'created_at')\n\n Log.info(\"Successfully uploaded wink activity data into elasticsearch.\")", "def push_bulk(self, obj_list, doc_type=None, refresh=True):\n assert isinstance(obj_list, collections.Sequence)\n assert len(obj_list) > 0\n\n es_obj_list = []\n for obj in obj_list:\n if obj is None:\n 
logger.warning(\"None object in input list\")\n continue\n\n doc_type, es_repr = self._validate_doc_and_get_type_and_repr(obj, doc_type)\n metadata = {\n '_op_type': 'index',\n \"_index\": self.index_name,\n \"_type\": doc_type,\n }\n es_repr.update(**metadata)\n\n es_obj_list.append(es_repr)\n\n helpers.bulk(client=self.conn.elastic_search_client, actions=es_obj_list,\n stats_only=True, refresh=u'true' if refresh else u'false')", "def indexDocsToES(indexName):\n docs = pd.DataFrame()\n try:\n docs = pd.read_csv(\"documents/full_news_documents.csv\")\n except FileNotFoundError:\n print(\"File not found\")\n\n use_these_keys = ['id', 'category', 'headline', 'authors', 'link', 'short_description', 'date', 'body']\n helpers.bulk(es_client, doc_generator(docs, indexName, use_these_keys))", "def bulk_indice(self, records: List[dict], index_name: str, pipeline: str) -> None:\n events = []\n for record in records:\n event = {\"_id\": self.calc_hash(record), \"_index\": index_name, \"_source\": record}\n if pipeline != \"\":\n event[\"pipeline\"] = pipeline\n events.append(event)\n bulk(self.es, events, raise_on_error=False)", "def load_data(self, json_file, target_index='infinity'):\n # data = json.loads(json_file)\n with open(json_file) as f:\n data = json.load(f)\n self.elasticsearch.index(index=target_index, doc_type='image_vector', body=data)", "def publish_impl(self) -> None:\n\n LOGGER.warn('ElasticsearchPublisher is being deprecated in favor of using SearchMetadatatoElasticasearchTask\\\n which publishes ES metadata with mappings compatible with amundsensearch >= 4.0.0')\n\n actions = [json.loads(line) for line in self.file_handler.readlines()]\n # ensure new data exists\n if not actions:\n LOGGER.warning(\"received no data to upload to Elasticsearch!\")\n return\n\n # Convert object to json for elasticsearch bulk upload\n # Bulk load JSON format is defined here:\n # https://www.elastic.co/guide/en/elasticsearch/reference/6.2/docs-bulk.html\n bulk_actions = []\n cnt = 0\n\n # create new index with mapping\n self.elasticsearch_client.indices.create(index=self.elasticsearch_new_index, body=self.elasticsearch_mapping)\n\n for action in actions:\n index_row = dict(index=dict(_index=self.elasticsearch_new_index))\n action['resource_type'] = self.elasticsearch_type\n\n bulk_actions.append(index_row)\n bulk_actions.append(action)\n cnt += 1\n if cnt == self.elasticsearch_batch_size:\n self.elasticsearch_client.bulk(bulk_actions)\n LOGGER.info('Publish %i of records to ES', cnt)\n cnt = 0\n bulk_actions = []\n\n # Do the final bulk actions\n if bulk_actions:\n self.elasticsearch_client.bulk(bulk_actions)\n\n # fetch indices that have {elasticsearch_alias} as alias\n elasticsearch_old_indices = self._fetch_old_index()\n\n # update alias to point to the new index\n actions = [{\"add\": {\"index\": self.elasticsearch_new_index, \"alias\": self.elasticsearch_alias}}]\n\n # delete old indices\n delete_actions = [{\"remove_index\": {\"index\": index}} for index in elasticsearch_old_indices]\n actions.extend(delete_actions)\n\n update_action = {\"actions\": actions}\n\n # perform alias update and index delete in single atomic operation\n self.elasticsearch_client.indices.update_aliases(update_action)", "def insert_to_elastic(elastic, paper_authors, papers, authors, index_name):\n helpers.bulk(elastic, merge_to_elastic(paper_authors, papers, authors, index_name))", "def create_doc_bulk(self, file: str, index: str) -> None:\n\n with open(file, \"r\") as f:\n bulk(self.es, self.make_documents(f, index))", 
"def add_elasticsearch_records(self, data_list):\n actions = [self.create_data_record(data_dict) for data_dict in data_list]\n self.actions_buffer.extend(actions)", "def ingest_many(self, data):\n raise NotImplementedError()", "def _index_sub(self, uri_list, num, batch_num):\n bname = '%s-%s' % (batch_num, num)\n log.debug(\"batch_num '%s' starting es_json conversion\",\n bname)\n qry_data = get_all_item_data([item[0] for item in uri_list],\n self.tstore_conn,\n rdfclass=self.rdf_class)\n log.debug(\"batch_num '%s-%s' query_complete | count: %s\",\n batch_num,\n num,\n len(qry_data))\n # path = os.path.join(CFG.dirs.cache, \"index_pre\")\n # if not os.path.exists(path):\n # os.makedirs(path)\n # with open(os.path.join(path, bname + \".json\"), \"w\") as fo:\n # fo.write(json.dumps(qry_data))\n data = RdfDataset(qry_data)\n del qry_data\n log.debug(\"batch_num '%s-%s' RdfDataset Loaded\", batch_num, num)\n for value in uri_list:\n try:\n\n self.batch_data[batch_num]['main'].append(\\\n data[value[0]].es_json())\n self.count += 1\n except KeyError:\n pass\n for name, indexer in self.other_indexers.items():\n for item in data.json_qry(\"$.:%s\" % name.pyuri):\n val = item.es_json()\n if val:\n self.batch_data[batch_num][name].append(val)\n self.batch_uris[batch_num].append(item.subject)\n del data\n del uri_list\n log.debug(\"batch_num '%s-%s' converted to es_json\", batch_num, num)", "def bulk_insert(cls, path=\"data.json\"):\n from json import load\n from codecs import open\n \n lists = load(open(path, \"r\", \"utf8\"))\n for lst in lists:\n ing = cls(content = lst)\n ing.put()", "def bulk_index_records(records):\n indexer = RecordIndexer()\n\n click.echo('Bulk indexing {} records...'.format(len(records)))\n indexer.bulk_index([str(r.id) for r in records])\n indexer.process_bulk_queue()\n click.echo('Indexing completed!')", "def build_es_bulk(line):\n action = '{\"index\": {\"_index\": \"%s\"}}' % EPMC_METADATA_INDEX\n data = line + '\\n'\n return '\\n'.join([action, data])", "def batch(self, request):\n return AlgoliaUtils_request(self.client.headers, self.write_hosts, \"POST\", \"/1/indexes/%s/batch\" % self.url_index_name, self.client.timeout, request)", "def batch(self, requests):\n return AlgoliaUtils_request(self.headers, self.write_hosts, \"POST\", \"/1/indexes/*/batch\", self.timeout, {\"requests\": requests})", "def load_file(cnx, config, data_filename):\n\n def post_bulk(bulk_json):\n \"\"\"ship a json bulk to Elastic Search for insert\"\"\"\n\n nbtry=0\n success=False\n\n # Bulk insert\n ####################################################################\n cnx.request(\"POST\",config.index+\"/\"+config.typ+\"/_bulk\",bulk_json) #\n ####################################################################\n\n # Get and read response from Elastic Search server\n resp=cnx.getresponse()\n resp_msg_json= resp.read()\n #sys.stderr.write( resp_msg_json + \"\\n\")\n resp_msg=json.loads(resp_msg_json)\n # Check status: both HTTP and within the Elastic Search answer\n if resp.status != httplib.OK or resp_msg[\"errors\"] is True:\n sys.stderr.write( bulk_json)\n sys.stderr.write( resp_msg_json +\"\\n\")\n raise Exception(\"ERROR when bulk loading into %s/%s: %d %s\\n\" % (config.index,config.typ, resp.status, resp.reason))\n\n sys.stderr.write( \"Starting loading data from %s\\n\" % data_filename )\n\n # opening data file\n with gzip.open(data_filename) as f:\n\n item_nb=0 # item number in the file\n batch_nb=0 # number of the batch within the file\n\n # bulk variables reset\n bulk_action={}\n 
bulk_action[\"create\"]={}\n elastic_post=\"\"\n bulk_buffer_size=0 # item number in the batch\n time1=datetime.datetime.now() # recording time before reading/decoding\n\n # Reading the file line by line\n for line in f:\n \n # decode the CSV line - If we don't manage to read the line we just ignore it...\n try:\n item_nb+=1\n\n # reading/decoding line\n ######################################\n input=config.decode_input_line(line) #\n ######################################\n bulk_buffer_size+=1\n \n # dataFile field is not really useful for analytics \n # but it allows keeping track (and potentially cleaning) what was loaded\n input[\"dataFile\"]=os.path.basename(data_filename)\n # itemNb allow to find back which line of the file corresponds to which document in the database\n input[\"itemNb\"]=item_nb-1\n \n # convert into JSON\n input_json=json.dumps(input)\n \n # Add in the buffer: \n # One Json line for the action (includes the routing/sharding info)\n # The routing will drive the partitionning of the data\n # Don't forget to specify it when running inputes\n ############################################################\n bulk_action[\"create\"][\"_routing\"]=config.getRouting(input) ### ROUTING KEY IS DEFINED HERE ###\n bulk_action_json=json.dumps(bulk_action) #\n # One Json line for the input itself #\n elastic_post+=bulk_action_json+\"\\n\"+input_json+\"\\n\" #\n ############################################################\n\n except Exception, e: # If we don't manage to read the line we just ignore it...\n sys.stderr.write( \"WARNING: unable to decode line: %s \\n\" %line)\n sys.stderr.write( str(e) )\n \n # When the buffer reaches the max size (config) then we load\n if bulk_buffer_size >= config.bulk_buffer_size_max:\n\n batch_nb+=1\n sys.stderr.write( \" Loading the batch of data #%d (%d items)\\n\" % (batch_nb,bulk_buffer_size))\n time2=datetime.datetime.now() # recording time before inserting\n read_time=int((time2-time1).total_seconds()*1000) # reading/decoding time\n\n #########################\n post_bulk(elastic_post) #\n #########################\n\n time3=datetime.datetime.now()# recording time after inserting\n post_time=int((time3-time2).total_seconds()*1000) # insert time\n\n # print statistics in stdout\n #################################################################\n print \"LOAD: %d,%d,%d\" % (bulk_buffer_size,read_time,post_time) #\n #################################################################\n\n # bulk variable reset\n elastic_post=\"\"\n bulk_buffer_size=0\n time1=datetime.datetime.now()\n\n\n # If there are remaining unloaded items in the buffer we load them\n if bulk_buffer_size>0:\n\n batch_nb+=1\n sys.stderr.write( \" Loading the batch of data #%d (%d items)\\n\" % (batch_nb,bulk_buffer_size))\n time2=datetime.datetime.now() # recording time before inserting\n read_time=int((time2-time1).total_seconds()*1000) # reading/decoding time\n \n #########################\n post_bulk(elastic_post) #\n #########################\n \n time3=datetime.datetime.now()# recording time after inserting\n post_time=int((time3-time2).total_seconds()*1000) # insert time\n \n # print statistics in stdout\n #################################################################\n print \"LOAD: %d,%d,%d\" % (bulk_buffer_size,read_time,post_time) #\n #################################################################\n \n sys.stderr.write( \"Load from %s finished sucessfully \" % data_filename)\n sys.stderr.write( \" (%d items in %d batches)\\n\" % (item_nb,batch_nb))\n\n return 
item_nb", "def insert(self, index, documents, batch_size=100):\n actions = []\n latest_index_id, begin_timestamp = self.__get_latest_index(index)\n\n for idx, doc in enumerate(documents):\n index_id = latest_index_id\n\n if doc[\"request_time\"] <= begin_timestamp:\n index_id = self.get_query_index(index, doc[\"request_time\"])\n\n action = {\n \"_index\": index + \"_\" + str(index_id),\n \"_type\": \"docs\",\n \"_source\": doc,\n }\n actions.append(action)\n\n if len(actions) == batch_size or idx == len(documents) - 1:\n print(\"Bulk ingesting started...\")\n\n try:\n bulk(self.client, actions, raise_on_error=True, request_timeout=200)\n except:\n print(\"Could not write the data.\")\n raise\n \n actions.clear()\n print(\"Bulk ingesting done\")\n if self.__get_index_size(index, latest_index_id) >= self.THRESHOLD:\n begin_timestamp = self.__update_index_timerange(\n index, latest_index_id\n )\n latest_index_id = self.__create_new_index(\n index, latest_index_id + 1, begin_timestamp\n )", "def commit(self):\n with self.lock:\n try:\n action_buffer = self.BulkBuffer.get_buffer()\n if action_buffer:\n successes, errors = bulk(self.elastic, action_buffer)\n except Exception as e:\n # Handle errors from bulk indexing request\n raise\n \n retry_until_ok(self.elastic.indices.refresh, index=\"\")", "def index_all(self, index_name):\n oks = 0\n notoks = 0\n for ok, item in streaming_bulk(\n self.es_client,\n self._iter_documents(index_name)\n ):\n if ok:\n oks += 1\n else:\n notoks += 1\n logging.info(\n \"Import results: %d ok, %d not ok\",\n oks,\n notoks\n )", "def store_index(self, index, doc_type, source_list, init_id):\n\n bulk_actions = []\n doc_id = init_id\n\n for source in source_list:\n data_body = ElasticSearchUtility.__index_data_body(index, doc_type, doc_id, source[\"_source\"])\n bulk_actions.append(data_body)\n doc_id += 1\n\n print 'inserting - ', len(bulk_actions)\n helpers.bulk(self.es, bulk_actions)", "def data(self):\n\n for i in json_parsed:\n name = i['name']\n number = i['number']\n address=i['address']\n banking=i['banking']\n position=i['position']\n latitude=position['lat']\n longitude=position['lng']\n bike_stands=i['bike_stands']\n status=i['status']\n insert_data(name, number, address, banking, latitude, longitude, bike_stands, status)", "def bulk_write(collection, iterable, job_id=None, unsafe=False):\n namespace = Namespace(collection.foreign_id)\n stage = get_stage(collection, OP_INDEX, job_id=job_id)\n entities = []\n for item in iterable:\n if not is_mapping(item):\n raise InvalidData(\"Failed to read input data\", errors=item)\n entity = model.get_proxy(item)\n entity = namespace.apply(entity)\n if not unsafe:\n entity = remove_checksums(entity)\n entities.append(entity)\n index_entities(stage, collection, entities)", "def bulk_update(self, request):\n serializer = MasterySerializer(\n data=request.data,\n many=True,\n )\n if serializer.is_valid():\n serializer.save()\n return Response(serializer.data)\n else:\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)", "def bulk_upsert(self, docs, namespace, timestamp):\n def docs_to_upsert():\n doc = None\n for doc in docs:\n # Remove metadata and redundant _id\n index, doc_type = self._index_and_mapping(namespace)\n doc_id = u(doc.pop(\"_id\"))\n document_action = {\n '_index': index,\n '_type': doc_type,\n '_id': doc_id,\n '_source': self._formatter.format_document(doc)\n }\n document_meta = {\n '_index': self.meta_index_name,\n '_type': self.meta_type,\n '_id': doc_id,\n '_source': {\n 'ns': 
namespace,\n '_ts': timestamp\n }\n }\n yield document_action\n yield document_meta\n if doc is None:\n raise errors.EmptyDocsError(\n \"Cannot upsert an empty sequence of \"\n \"documents into Elastic Search\")\n try:\n kw = {}\n if self.chunk_size > 0:\n kw['chunk_size'] = self.chunk_size\n\n responses = streaming_bulk(client=self.elastic,\n actions=docs_to_upsert(),\n **kw)\n\n for ok, resp in responses:\n if not ok:\n LOG.error(\n \"Could not bulk-upsert document \"\n \"into ElasticSearch: %r\" % resp)\n if self.auto_commit_interval == 0:\n self.commit()\n except errors.EmptyDocsError:\n # This can happen when mongo-connector starts up, there is no\n # config file, but nothing to dump\n pass", "def import_documents(self, index, documents, **kwargs):\n self._logger.info('%s documents to index into %s', len(documents), index)\n response = None\n if 'pipeline' in kwargs:\n pipeline_name = kwargs.get(\"pipeline\")\n response = helpers.bulk(self.es, documents, index=index, doc_type=self.doc_type, pipeline=pipeline_name)\n else:\n response = helpers.bulk(self.es, documents, index=index, doc_type=self.doc_type)\n\n # It returns a tuple with summary information - \n # number of successfully executed actions and either list of errors or number of errors if stats_only is set to True.\n return response", "def test_bulk_index_iterates_docs_only_once(self):\n doc = self._make_doc()\n docs = OneshotIterable([doc])\n self.adapter.bulk_index(docs) # does not raise IterableExhaustedError", "def batch_index(self, records_uuids, request_timeout=None):\n LOGGER.info(f\"Starting task `batch_index for {len(records_uuids)} records\")\n return InspireRecordIndexer().bulk_index(records_uuids, request_timeout)", "def upload(self, documents: List[ElasticDocument], vectorise_func, index: str = None) -> None:\n if not index:\n index = self._index\n\n # Add doc_store to documents\n for d in documents:\n d.doc_store = self\n # Check ID uniqueness\n check_duplicate_documents(documents)\n # Check type consistency\n check_document_types(documents)\n # Batching\n batches = batch_items(documents)\n\n for batch in batches:\n payload = []\n # Calculate vectors\n vectorise_func(batch, self)\n\n for document in batch:\n # JSON representation of document\n doc_json = document.to_elastic()\n\n # Add correct index\n doc_json[\"_index\"] = index\n\n # Rename id key\n doc_json[\"_id\"] = doc_json[\"id\"]\n del doc_json[\"id\"]\n\n payload.append(doc_json)\n\n # Bulk upload to elasticsearch\n helpers.bulk(self._client, payload)\n\n # Update index\n self._client.indices.refresh(index=self._index)", "def __init__(self, csvfile, url, index, doc_type, username, password, chunk_size=100):\n\n self.csv_file = csvfile\n self.es_url = url\n self.es_main_index = index\n self.es_main_type = doc_type\n self.es = Elasticsearch(self.es_url,\n http_auth=(username, password),\n use_ssl=True,\n verify_certs=True,\n connection_class=RequestsHttpConnection)\n self.chunk_size = chunk_size\n\n self.mapping = {\n \"properties\": {\n \"region\": {\n \"type\": \"string\",\n \"index\": \"not_analyzed\",\n \"doc_values\": True,\n \"fielddata\": {\n \"loading\": \"eager_global_ordinals\"\n }\n },\n \"agg_commodity\": {\n \"type\": \"string\",\n \"index\": \"not_analyzed\",\n \"doc_values\": True,\n \"fielddata\": {\n \"loading\": \"eager_global_ordinals\"\n }\n },\n \"agg_continent\": {\n \"type\": \"string\",\n \"index\": \"not_analyzed\",\n \"doc_values\": True,\n \"fielddata\": {\n \"loading\": \"eager_global_ordinals\"\n }\n },\n \"agg_subcontinent\": 
{\n \"type\": \"string\",\n \"index\": \"not_analyzed\",\n \"doc_values\": True,\n \"fielddata\": {\n \"loading\": \"eager_global_ordinals\"\n }\n },\n \"impactparameter\": {\n \"type\": \"string\",\n \"index\": \"not_analyzed\",\n \"doc_values\": True,\n \"fielddata\": {\n \"loading\": \"eager_global_ordinals\"\n }\n },\n \"commodity\": {\n \"type\": \"string\",\n \"index\": \"not_analyzed\",\n \"doc_values\": True,\n \"fielddata\": {\n \"loading\": \"eager_global_ordinals\"\n }\n },\n \"year\": {\n \"type\": \"integer\"\n },\n \"Val\": {\n \"type\": \"float\"\n }\n }\n }", "def execute(self):\n for coll in list(self.__bulks):\n try:\n bulkOp = self.__bulks[coll]\n curr_result = Counter(bulkOp.execute())\n self.update_results(coll, curr_result)\n except BulkWriteError as bwe:\n sys.stderr.write(str(bwe.details))", "def index_batch(self,batch):\n pass", "def processed_bulk(self, pipeline):\n docs = [Document([], text=t) for t in EN_DOCS]\n return pipeline(docs)", "def bulk_refresh():\n logger.info(\"Refreshing/Updating persuasions in bulk\")\n try:\n request_data = json.loads(request.data)\n args = request.args\n with concurrent.futures.ThreadPoolExecutor(max_workers=settings.MAX_WORKERS) as executor:\n {executor.submit(PersuasionServices.refresh, data, args): data for data in request_data}\n\n return jsonify(\n dict(status=\"success\", message=\"Your request is in the queue, persuasion will be updated shortly\"))\n except Exception as e:\n logger.error(\"Exception while creating persuasions in bulk - \" + repr(e))\n return jsonify(dict(status=\"failure\", error=repr(e)))", "def bulkInsert(self, url, values):\n pass", "def import_into_elasticsearch(tf, es, max_epmc_metadata=1000):\n\n insert_sum = 0\n with ThreadPool(THREADPOOL_SIZE) as pool:\n if THREADPOOL_SIZE > 1:\n pool_map = pool.imap\n else:\n pool_map = map\n for line_count in pool_map(\n partial(\n process_es_bulk,\n es=es,\n ),\n yield_metadata_chunk(\n tf,\n chunk_size=CHUNCK_SIZE,\n max_epmc_metadata=max_epmc_metadata,\n )\n ):\n insert_sum += line_count\n return es.count(index=EPMC_METADATA_INDEX)['count'], insert_sum", "async def send_to_elastic(self, data, index='wallarm'):\n self.es.index(body=data, index=index)\n return print('Sent successfully')", "def step010():\n logger.logMessage('Begin: Getting candidate documents from elasticsearch')\n\n def limitHour(d):\n thish = d.start_time.tz_localize(tz='UTC')\n nexth = thish + dt.timedelta(hours=1)\n return { 'range': { 'time': {'gte':thish, 'lt':nexth } } }\n \n conn = sql.create_engine(pgurl)\n client = es.Elasticsearch(hostlist)\n dupesDF = pd.read_sql_table('weather_dupes',conn).set_index('time')\n hours =dupesDF.to_period('H').reset_index()['time'].unique()\n ranges = [ limitHour(h) for h in hours ]\n query = { \n '_source': [ 'tsa','time' ],\n 'query': { \n 'bool': { 'should': ranges } \n } \n }\n #logger.logMessage(level='DEBUG',message='Query body: {0}'.format(query))\n hits = eshelp.scan(client=client,index=indexName,doc_type='doc',query=query)\n numRecs = 0\n with open(candidatesFile,'w') as f:\n for h in hits:\n src = h['_source']\n tsa = int(src['tsa'])\n time = src['time']\n docid = h['_id']\n idx = h['_index']\n f.write(f'{tsa:014d};{time:25s};{docid:32s};{idx:32s}\\n') \n numRecs += 1\n if numRecs % 1000 == 0:\n logger.logMessage(level='DEBUG',message=\"{0:9d} records written\".format(numRecs))\n logger.logMessage(message=\"{0:9d} total records written\".format(numRecs))\n logger.logMessage('End: Getting candidate documents from elasticsearch')", "def 
insert(input_json): \n client, index_name = connection_es()\n datetime_ = datetime.datetime.now().strftime(\"%Y.%m.%d_%H:%M:%S\")\n fs_metadata_name = index_name+datetime_\n res = client.index(index = fs_metadata_name, doc_type = 'nvisnx', body = input_json)\n return res", "def run_bulk_complete():\n data = flask.request.json\n parameters = {\"data\": data}\n\n if _detect_files(data):\n try:\n gdrive.get_http_auth()\n except gdrive.GdriveUnauthorized:\n response = app.make_response((\"auth\", 401,\n [(\"Content-Type\", \"text/html\")]))\n return response\n parameters[\"credentials\"] = flask.session.get('credentials')\n\n bg_task = background_task.create_task(\n name=\"bulk_complete\",\n url=flask.url_for(bulk_complete.__name__),\n queued_callback=bulk_complete,\n parameters=parameters\n )\n db.session.commit()\n return bg_task.make_response(\n app.make_response((utils.as_json(bg_task), 200,\n [('Content-Type', \"text/json\")]))\n )", "def yield_publications_metadata(tf):\n logger.info('Start yielding...')\n with gzip.GzipFile(fileobj=tf, mode='r') as json_file:\n for index, line in enumerate(json_file):\n yield build_es_bulk(line.decode('utf-8'))", "def _process_json(self, json_content):\n if self._ns_sqlcon.connection is None:\n LOG.error(f'failed to open connection to DB')\n return\n entries = [entry for entry in json_content]\n LOG.info('started updating DB')\n num_of_entries = len(entries)\n for x in range(num_of_entries):\n entry = entries[x]\n try:\n self._ns_sqlcon.update_plugins_table(entry['_source'])\n except AttributeError:\n LOG.exception(f'malformed entry: {entry}')\n if x % 2000 != 0:\n continue\n LOG.info(f'Updated {x} records')\n\n LOG.info(f'Updated {num_of_entries} records')\n try:\n LOG.info('Commit started')\n self._ns_sqlcon.session.commit()\n LOG.info('Commit finished')\n except sqlalchemy.exc.IntegrityError:\n LOG.exception('failed committing updates to DB')\n self._ns_sqlcon.session.rollback()\n\n LOG.info('Finished updating DB')", "def bulk_insert(objects, model, session):\n session.bulk_insert_mappings(model, objects)", "def ingest_data(args):\n fetchopts = {\n \"fixtures\": FIXTURES,\n \"startyear\": args.start_year or fetch.STARTYEAR,\n \"endyear\": args.end_year or fetch.ENDYEAR\n }\n\n folder, num_series = fetch.fetch_all(**fetchopts)\n\n fcsv, num_rows = wrangle.wrangle_csv()\n fjson, _ = wrangle.wrangle_json()\n\n return (\n \"Ingested %i rows in %i time series to %s\\n\"\n \"Wrote JSON data to %s\\n\"\n \"Wrote CSV data to %s\"\n ) % (num_rows, num_series, folder, fcsv, fjson)", "def updateDocumentAll(self, documents):\n docs = []\n for document in documents:\n if isinstance(document, couch.Document):\n document = document.getData()\n\n # these are required params\n if \"_id\" not in document or \"_rev\" not in document:\n raise Exception(\"Both _id & _rev fields are required!\")\n\n docs.append(document)\n\n return self.client.post(self.name +\"/_bulk_docs\", None,\n {\"docs\": docs}).getBodyData()", "def test_bulk_iterates_actions_only_once(self):\n doc = self._make_doc()\n actions = OneshotIterable([BulkActionItem.index(doc)])\n self.adapter.bulk(actions) # does not raise IterableExhaustedError", "def monkeypatch_es():\n if _monkeypatched_es:\n return\n\n if VERSION == (0, 4, 5):\n def normalize_bulk_return(fun):\n \"\"\"Set's \"ok\" based on \"status\" if \"status\" exists\"\"\"\n @wraps(fun)\n def _fixed_bulk(self, *args, **kwargs):\n def fix_item(item):\n if 'status' in item['index']:\n item['index']['ok'] = (\n 200 <= item['index']['status'] < 
300)\n return item\n\n ret = fun(self, *args, **kwargs)\n if 'items' in ret:\n ret['items'] = [fix_item(item) for item in ret['items']]\n return ret\n return _fixed_bulk\n\n Elasticsearch.bulk = normalize_bulk_return(Elasticsearch.bulk)", "def create_index():\n es = Elasticsearch()\n es.indices.delete(index='drugaid-index')\n with open('../data_crawling/demo_drug_data-fixed.json') as f:\n drug_data = json.load(f)\n\n for drug in drug_data:\n es.index(index='drugaid-index', doc_type='drug', body=drug)", "def indexable_objects_iter(docs, es_index, es_doctype):\n\n for doc in docs:\n insert_doc = {\n '_index': es_index,\n '_type': es_doctype,\n '_id': \"%s-%s\" % (doc['state'], doc['id']),\n '_source': doc\n }\n yield insert_doc", "def index_placetypes(self, file_path):\n\n datadir = f'{file_path}/data'\n logging.debug(\"Indexing %s\", datadir)\n\n docs = []\n counter = 0\n for root, _, files in os.walk(datadir):\n for file in files:\n if '.json' in file:\n placefile = os.path.join(root, file)\n logging.info(\"Loading %s\", placefile)\n with open(placefile, encoding='UTF-8') as ifh:\n doc = json.load(ifh)\n doc['_index'] = self.index\n doc['_id'] = doc['id']\n docs.append(doc)\n counter = counter + 1\n\n if docs:\n helpers.bulk(self.esclient, docs)\n\n logging.info(\"Placetypes added %s docs\", counter)\n logging.info(\"Finished indexing %s\", datadir)", "def update_from_indexes(self, data, **kw):\n for i in data:\n self.update_from_index(i, **kw)", "def run(self, mapping={}, *args, **kwargs):\n self.processed = 0\n for batch in self._process_by_batch(self.load(*args, **kwargs)):\n batch = list(map(lambda doc: self._apply_mapping(doc, mapping), batch))\n for doc in batch:\n self._ingest(iterable=doc, doctype=doc[\"doctype\"])\n self.processed += 1\n logger.info(\"Added {} documents to the database.\".format(self.processed))", "def test_get_Student_bulk(self):\n school_ids = self.create_School(2,20)\n url = '/students'\n for i in range(10):\n data = {'first_name': 'Poompatai', 'last_name': 'Puntitpong','age': 20, 'nationality': 'Thailand', 'school': school_ids[0]}\n response = self.client.post(url, data, format='json')\n\n response = self.client.get(url, format='json')\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(response.data['count'], 10)\n self.assertEqual(response.data['results'][0]['first_name'], 'Poompatai')\n self.assertEqual(response.data['results'][0]['last_name'], 'Puntitpong')\n self.assertEqual(response.data['results'][0]['age'], 20)\n self.assertEqual(response.data['results'][0]['nationality'], 'Thailand')\n self.assertEqual(response.data['results'][0]['school'], school_ids[0])", "def parallel_import_documents(self, index, documents, **kwargs):\n \n # Set default values in passed as kwargs\n chunk_size = kwargs.get('chunk_size', None)\n if chunk_size is None:\n chunk_size = 20000\n kwargs['chunk_size'] = chunk_size\n \n request_timeout = kwargs.get('request_timeout', None)\n if request_timeout is None:\n request_timeout = 3600\n kwargs['request_timeout'] = request_timeout\n \n doc_type = kwargs.get('doc_type', None)\n if doc_type is None:\n doc_type = \"_doc\"\n kwargs['doc_type'] = doc_type\n \n raise_on_exception = kwargs.get('raise_on_exception', None)\n if raise_on_exception is None:\n raise_on_exception = False\n kwargs['raise_on_exception'] = raise_on_exception\n \n raise_on_error = kwargs.get('raise_on_error', None)\n if raise_on_error is None:\n raise_on_error = False\n kwargs['raise_on_error'] = raise_on_error\n \n self._logger.info('%s 
documents to index into %s', len(documents), index)\n doc_count = 0 \n \n if len(documents) > 0:\n for success, info in helpers.parallel_bulk(self.es, documents, index=index, **kwargs):\n if not success:\n self._logger.error(f'A document failed: {info}')\n else:\n doc_count += 1\n \n self._logger.info('%s documents indexed into %s', doc_count, index)\n \n return doc_count", "def bulkupload(self, string, bibo):\n if not self.filemode:\n self.bulknum += 1\n self.esdocs.append(self.rdf2es(string, bibo))\n\n if self.filemode:\n # Output content to file\n #I think we shouldn't serialize the content in memory in the output-file mode\n\n for outer in self.esdocs:\n for inner in outer:\n #self.of.write(dumps(inner, separators='\\n'))\n #we need this json dump method because the content is stored in a dictionary structure - as far as I understand it\n #so we can't just write a string\n dump(inner, self.of)\n #dump(bytes(inner,'UTF-8'), self.of)\n self.writtenDocuments += 1\n\n self.of.write('\\n')\n #perhaps flush it only in bigger chunks? - later\n #self.of.flush()\n del self.esdocs[:]\n if self.writtenDocuments >= self.bulksize:\n self._closeFile()\n self.writtenDocuments = 0\n self._openFile()\n\n elif self.bulknum >= self.bulksize:\n # Perform bulk upload\n helpers.bulk(client=self.of, actions=self.esdocs, stats_only=True)\n # Reset counter and empty list\n self.bulknum = 0\n del self.esdocs[:]", "def insert_data_bulk(self, table_name, data):\n if len(data) == 0:\n return\n\n fields = \", \".join(data[0].keys())\n value_placeholders = \", \".join([\"%s\" for f in data[0].keys()])\n query = \"INSERT INTO %s(%s) VALUES (%s)\" % (table_name, fields, value_placeholders)\n\n data = [tuple(self.pack(data_point.values())) for data_point in data]\n\n chunk_size = 50000\n data_chunks = [data[i:i + chunk_size] for i in range(0, len(data), chunk_size)]\n for chunk in data_chunks:\n self.cursor.executemany(query, chunk)\n self.db_connection.commit()", "def import_data(filename,\n index_name,\n type_name,\n delimiter,\n server,\n delete_type=False,\n field_translations=None,\n mapping=None,\n username=None,\n password=None,\n bulk_index_count=BULKINDEX_COUNT,\n timeout=None,\n verify=True):\n if server is None:\n server = SERVER_DEFAULT\n\n if bulk_index_count is None:\n bulk_index_count = BULKINDEX_COUNT\n\n data_lines = utils.retrieve_file_lines(filename)\n\n if len(data_lines) < 2:\n print \"there is no data to import in \" + filename\n return\n\n es = ElasticSearchConnection(server, username, password, timeout, verify)\n full_url = server + \"/\" + index_name + \"/\" + type_name\n\n if delete_type:\n print \"clearing existing documents from \" + full_url\n es.clear_documents(index_name, type_name)\n\n if es.ensure_index(index_name):\n if mapping is not None:\n print \"applying mapping from \" + mapping + \" to \" + full_url\n try:\n mapping_def = json.loads(utils.retrieve_file(mapping))\n es.ensure_mapping(index_name, type_name, mapping_def)\n except ValueError:\n print \"supplied JSON was not formatted correctly, skipping this step\"\n\n start_time = time.time()\n\n # ensure large fields can be parsed\n csv.field_size_limit(sys.maxsize)\n\n # translate field names if applicable\n if field_translations is not None:\n reader = translate_fields_reader(data_lines, field_translations, delimiter)\n else:\n reader = csv.DictReader(data_lines, delimiter=delimiter)\n\n # closure for displaying status of operation\n def show_status(current_count, total_count):\n percent_complete = current_count * 100 / 
total_count\n sys.stdout.write(\"\\rstatus: %d%%\" % percent_complete)\n sys.stdout.flush()\n\n print \"importing data into \" + full_url + \" (\" + str(\n bulk_index_count) + \" rows at a time) from file \" + filename\n count = es.bulk_index_docs(reader,\n index_name,\n type_name,\n bulk_index_count,\n show_status)\n\n # indicate completion\n show_status(100, 100)\n end_time = time.time() - start_time\n print \", import of \" + str(count) + \" documents completed in %.2f seconds\" % end_time\n\n else:\n print \"index at \" + server + \"/\" + index_name + \" can't be written to\"\n\n return", "def _send_data(self):\n \n # Do not send more than 100 datasets each time (totally arbitrary)\n MAX_DATA_SETS_PER_POST = 100\n data_to_send = self._data_buffer[0:MAX_DATA_SETS_PER_POST]\n data_to_keep = self._data_buffer[MAX_DATA_SETS_PER_POST:]\n\n # Prepare data string with the values in data buffer\n now = time.time()\n data_string = '[' \n for (timestamp, data) in data_to_send:\n data_string += '['\n data_string += str(round(timestamp-now,2))\n for sample in data:\n data_string += ','\n data_string += str(sample)\n data_string += '],'\n # Remove trailing comma and close bracket\n data_string = data_string[0:-1]+']'\n\n self._log.debug(\"Data string: \" + data_string)\n \n # Prepare URL string of the form\n # 'http://domain.tld/emoncms/input/bulk.json?apikey=\n # 12345&data=[[-10,10,1806],[-5,10,1806],[0,10,1806]]'\n url_string = self._settings['protocol'] + self._settings['domain'] + \\\n self._settings['path'] + \"/input/bulk_json?apikey=\" + \\\n self._settings['apikey'] + \"&data=\" + data_string\n self._log.debug(\"URL string: \" + url_string)\n\n # Send data to server\n self._log.info(\"Sending to \" + \n self._settings['domain'] + self._settings['path'])\n try:\n result = urllib2.urlopen(url_string, timeout=60)\n except urllib2.HTTPError as e:\n self._log.warning(\"Couldn't send to server, HTTPError: \" + \n str(e.code))\n except urllib2.URLError as e:\n self._log.warning(\"Couldn't send to server, URLError: \" + \n str(e.reason))\n except httplib.HTTPException:\n self._log.warning(\"Couldn't send to server, HTTPException\")\n except Exception:\n import traceback\n self._log.warning(\"Couldn't send to server, Exception: \" + \n traceback.format_exc())\n else:\n if (result.readline() == 'ok'):\n self._log.debug(\"Send ok\")\n # Send ok -> empty buffer\n self._data_buffer = data_to_keep\n return True\n else:\n self._log.warning(\"Send failure\")", "def load_variant_bulk(self, variants):\n if len(variants) == 0:\n return\n\n LOG.debug(\"Loading variant bulk\")\n try:\n result = self.variant_collection.insert_many(variants)\n except (DuplicateKeyError, BulkWriteError) as err:\n # If the bulk write is wrong there are probably some variants already existing\n # In the database. 
So insert each variant\n LOG.warning(\"Bulk insertion failed - attempting separate variant upsert for this bulk\")\n for var_obj in variants:\n try:\n self.upsert_variant(var_obj)\n except IntegrityError as err:\n pass\n\n return", "def flush_buffer(self):\n if not self.actions_buffer:\n return\n\n # reset actions buffer and take what's currently in the list\n actions = self.actions_buffer\n self.actions_buffer = []\n try:\n elasticsearch.helpers.bulk(self.client, actions, stats_only=True)\n except Exception as e:\n # put actions back if it failed\n self.actions_buffer.extend(actions)\n raise", "def bulkupload_entities(self, entity_upload_parameters, tmp_file, progress=None):\n\n file_upload_parameters = FileUploadParameters(\n upload_file_path=tmp_file,\n result_file_directory=entity_upload_parameters.result_file_directory,\n result_file_name=entity_upload_parameters.result_file_name,\n overwrite_result_file=entity_upload_parameters.overwrite_result_file,\n response_mode=entity_upload_parameters.response_mode,\n compress_upload_file=True,\n )\n result_file_path = self.upload_file(\n file_upload_parameters=file_upload_parameters,\n progress=progress,\n )\n with BulkFileReader(result_file_path, result_file_type=ResultFileType.upload) as reader:\n for entity in reader:\n yield entity", "def bulk_builder(self, changes):\n for change in changes:\n try:\n t = self.change_trigger(change)\n if t is not None:\n tr = self.change_transform(t)\n if tr is not None:\n self.change_transport(tr)\n\n yield {\n \"index\": {\"_index\": self.es_index, \"_type\": self.es_type,\n \"_id\": tr['_id']}}\n yield tr\n except Exception, ex:\n pillow_logging.error(\"Error on change: %s, %s\" % (change['id'], ex))", "def getDataBatch(self, batch_size):\n for i in range(batch_size):\n params.offset = params.offset+i #increment by 1 for the next set of batch\n url = 'https://api.nytimes.com/svc/search/v2/articlesearch.json'\n url_params = {'q': self.args.query.replace(' ', '+'),'api-key': self.args.api_key,'page': params.offset}\n response = requests.get(url, params=url_params)\n r = response.json()\n\n #start by checking call was successful\n if response.ok:\n if r['status'] != 'OK':\n log.error(\"Error with API call, NYT status not ok\")\n return None\n\n # TODO: implement - this dummy implementation returns one batch of data\n list_of_art = []\n for art in r['response']['docs']:\n list_of_art.append(functions.flatten_json(art)) #attach to list returned in call\n yield list_of_art\n else:\n log.error(\"Error during API call on request side\")", "def bulk_get_documents():\n ids = flask.request.json\n if not ids:\n raise UserError(\"No ids provided\")\n if not isinstance(ids, list):\n raise UserError(\"ids is not a list\")\n\n with blueprint.index_driver.session as session:\n # Comment it out to compare against the eager loading option.\n # query = session.query(IndexRecord)\n # query = query.filter(IndexRecord.did.in_(ids)\n\n # Use eager loading.\n query = session.query(IndexRecord)\n query = query.options(\n joinedload(IndexRecord.urls).joinedload(IndexRecordUrl.url_metadata)\n )\n query = query.options(joinedload(IndexRecord.acl))\n query = query.options(joinedload(IndexRecord.authz))\n query = query.options(joinedload(IndexRecord.hashes))\n query = query.options(joinedload(IndexRecord.index_metadata))\n query = query.options(joinedload(IndexRecord.aliases))\n query = query.filter(IndexRecord.did.in_(ids))\n\n docs = [q.to_document_dict() for q in query]\n return flask.Response(json.dumps(docs), 200, 
mimetype=\"application/json\")", "def process_datasets(self):\n\n with open(self.mappings, \"r+\") as json_file:\n emsl_to_jgi = json.load(json_file)\n emsl_to_jgi_copy = copy.deepcopy(emsl_to_jgi)\n\n contaminant_file_loc = emsl_to_jgi[\"contaminant_file_loc\"]\n # run for each dataset\n for dataset_id, values in emsl_to_jgi.items():\n if dataset_id not in [\n \"contaminant_file_loc\",\n \"analysis_activity_file_loc\",\n \"data_objects_file_loc\",\n \"STUDY\",\n \"tools_used\",\n ]:\n raw_file_loc = values[\"raw_file_loc\"]\n self.dataset_name = values[\"dataset_name\"]\n # dataset search against a fasta file\n for genome_directory, locations in values[\n \"genome_directory\"\n ].items():\n # clear object to prepare next job\n ANALYSIS_JOBS_OBJECT.clear()\n\n # create log_dir\n self.save_job_results = os.path.join(\n self.result_loc, dataset_id, genome_directory\n )\n self.log_collected_at = os.path.join(\n os.path.abspath(self.save_job_results), \"analysis_jobs_logs\"\n )\n if not os.path.exists(self.log_collected_at):\n os.makedirs(self.log_collected_at)\n\n files = [locations[\"faa_file_loc\"], contaminant_file_loc]\n contaminated_faa_file_loc = self.contaminate_fasta(files)\n\n self.register_job_in_emsl_to_jgi(\n dataset_id,\n genome_directory,\n \"contaminated_faa_file_loc\",\n contaminated_faa_file_loc,\n emsl_to_jgi_copy,\n )\n # convert .faa to .txt\n faa_txt_file = self.convert_faa2txt(\n dataset_id, contaminated_faa_file_loc\n )\n self.register_job_in_emsl_to_jgi(\n dataset_id,\n genome_directory,\n \"txt_faa_file_loc\",\n faa_txt_file,\n emsl_to_jgi_copy,\n )\n\n # log & run job\n self.run_n_log_job(\n dataset_id,\n genome_directory,\n contaminated_faa_file_loc,\n raw_file_loc,\n emsl_to_jgi_copy,\n )\n\n # merge analysis\n resultant_file = self.merge_analysis_jobs(\n dataset_id, genome_directory\n )\n self.register_job_in_emsl_to_jgi(\n dataset_id,\n genome_directory,\n \"resultant_file_loc\",\n resultant_file,\n emsl_to_jgi_copy,\n )\n\n # capture the job metadata object\n logger.info(\"Jobrun\", extra=LOGGED_ANALYSIS_JOB)\n\n # update emsl_to_jgi.json\n json_file.seek(0) # move back to BOF.\n json_file.truncate()\n json_file.write(json.dumps(emsl_to_jgi_copy, default=str, indent=4))\n pass", "def bulk_insert(engine, model, entries):\n with session_scope(engine) as session:\n session.bulk_insert_mappings(model, entries)\n session.commit()", "def submit_data_to_es(client, doc_path, doc_contents):\n try:\n doc_text = str(doc_contents.decode('utf8'))\n body = {'text': doc_text}\n client.index(index=ES_INDEX, id=doc_path, doc_type='_doc', body=body)\n except Exception as error: # pylint: disable=broad-except\n logging.error('%s: %s', type(error), error)\n sys.exit(1)", "def run_full(self):\n # Get a cursor of all the keywords in the databse\n keyword_cursor = self.mongo_controller.get_keyword_batch_cursor()\n\n # Go over each batch\n for batch in keyword_cursor:\n\n # Go over each keyword in the batch\n for keyword_dict in bson.decode_all(batch):\n\n keyword = Keyword.from_dict(keyword_dict) # Cast the keyword to a Keyword object\n twitter_results = self.crawler.search(keyword, limit=self.limit_requests) # Run the search\n self.__save_tweets(twitter_results) # Save all tweets to the DB", "def upload_entities(self, entity_upload_parameters, progress=None):\n \n tmp_file = path.join(self.working_directory, '{0}.csv'.format(uuid.uuid1()))\n with BulkFileWriter(tmp_file) as writer:\n for entity in entity_upload_parameters.entities:\n writer.write_entity(entity)\n\n if 
(self.need_to_try_upload_entity_records_sync_first(entity_upload_parameters)):\n return self.bulkupload_entitie_records(entity_upload_parameters, tmp_file, progress)\n else:\n return self.bulkupload_entities(entity_upload_parameters, tmp_file, progress)", "def write(cls, templates, values, *args):\n IndexBacklog = Pool().get('elasticsearch.index_backlog')\n Product = Pool().get('product.product')\n\n rv = super(Template, cls).write(templates, values, *args)\n\n products = []\n for template in templates:\n products.extend([Product(p) for p in template.products])\n IndexBacklog.create_from_records(products)\n return rv", "def populate_employees():\n employees = get_employees()\n\n db.session.bulk_save_objects(employees)\n db.session.commit()", "def convert_to_es_format(tweet):\n action = [\n {\n \"_index\": config.ELASTICSEARCH['index'],\n \"_type\": config.ELASTICSEARCH['type'],\n \"_source\": {\n \"tweet\": tweet\n }\n }\n ]\n return action", "def bulk_load(config):\n from aleph.logic.collections import create_collection\n for foreign_id, data in config.items():\n data['label'] = data.get('label', foreign_id)\n collection = create_collection(foreign_id, data)\n for query in dict_list(data, 'queries', 'query'):\n bulk_load_query.apply_async([collection.id, query], priority=6)", "def _es_push_indexes(self, content):\n for c in self.es_clients:\n c.create_index(content)", "def bulkupload_entitie_records(self, entity_upload_parameters, tmp_file, progress=None):\n records = self.service_client.factory.create(\"ns2:ArrayOfstring\")\n tmp_csv_file = io.open(tmp_file, encoding='utf-8-sig')\n\n records.string = [x.strip() for x in tmp_csv_file.readlines()]\n \n try:\n #print(self.service_client)\n response = self.service_client.UploadEntityRecords(\n AccountId=self._authorization_data.account_id,\n EntityRecords=records,\n ResponseMode=entity_upload_parameters.response_mode\n )\n if self.need_to_fall_back_to_async(response):\n headers = self.service_client.get_response_header()\n operation = BulkUploadOperation(\n request_id=response.RequestId,\n authorization_data=self._authorization_data,\n poll_interval_in_milliseconds=self._poll_interval_in_milliseconds,\n environment=self._environment,\n tracking_id=headers['TrackingId'] if 'TrackingId' in headers else None,\n **self.suds_options\n )\n file_path = self.download_upload_result(operation, entity_upload_parameters, progress)\n return self.read_result_from_bulk_file(file_path)\n else:\n return self.read_bulkupsert_response(response) \n except Exception as ex:\n if 'OperationNotSupported' == operation_errorcode_of_exception(ex):\n return self.bulkupload_entities(entity_upload_parameters, tmp_file, progress)\n else:\n raise ex", "def WriteEventBody(self, event_object):\n self._data.append(self._EventToDict(event_object))\n self._counter += 1\n\n # Check if we need to flush.\n if self._counter % 5000 == 0:\n self._elastic_db.bulk_index(self._index_name, self._doc_type, self._data)\n self._data = []\n sys.stdout.write('.')\n sys.stdout.flush()", "def test_otoroshi_controllers_adminapi_tcp_service_api_controller_bulk_create_action(self):\n pass", "def _dispatch_metrics(self, payload):\n for item in payload:\n try:\n self._ingest.send(gauges=item['gauges'], counters=item['counters'])\n except Exception as e:\n self._logger.error(\"Exception while sending payload to ingest : {0}\".format(e))", "def bulk_cavs_search():\n\n data = flask.request.json\n if not data or not data.get(\"ids\"):\n return exceptions.BadRequest()\n response = 
_get_bulk_cad_assessment_data(data)\n return flask.Response(json.dumps(response), mimetype='application/json')", "def ingest(self, data):\n self.print_func('Ingesting data from {} feed.'.format(self.feed['feedname']))\n data = self.parse_to_json(data)\n new_statuses, generate_out_rec, prefix, field_name_tuple = self.generate_fp_status_dict(data)\n\n for fp, current_status in new_statuses.items():\n key = prefix+fp\n out_rec = generate_out_rec(current_status)\n if self.s3helper.path_exists(self.bucket, key):\n out_recs = self.combine_with_existing_recs(key, out_rec, field_name_tuple)\n if out_recs is None:\n continue\n else:\n out_recs = [out_rec]\n self.n_new_fps += 1\n self.s3helper.write_recs(out_recs, self.bucket, key)\n\n self.print_func('{} status found in {} feed: {} skipped, {} overwrites, {} updates, {} new files'.format(\n len(new_statuses), self.feed['feedname'], self.n_skipped, self.n_overwrite, self.n_new_status, self.n_new_fps))", "def export_documents(self, index, filename, **kwargs):\n documentsGenerator = self.get_documents(index, **kwargs)\n documents = []\n format=kwargs.get('format','json')\n for doc in documentsGenerator:\n doc_with_id={**doc.to_dict(),'_id':doc.meta.id}\n documents.append(doc_with_id)\n self.__export_documents(documents,filename,exportformat=format)", "def test_ingest():\n schema = pa.schema([\n pa.field(\"foo\", pa.int64()),\n pa.field(\"bar\", pa.int64())\n ])\n\n data = [{\"foo\": 1, \"bar\": 2}, {\"foo\": 10, \"bar\": 20}]\n\n converted_data = client.ingest_data(data, schema)\n assert converted_data.to_pydict() == {'foo': [1, 10], 'bar': [2, 20]}", "def _update_bulk(self, iterable):\n self.cursor.executemany(self.UPDATE, iterable)" ]
[ "0.7087452", "0.70536715", "0.69762194", "0.6880638", "0.6755723", "0.6637794", "0.65397894", "0.63219965", "0.6277341", "0.6203022", "0.6132802", "0.6057554", "0.60138315", "0.5994877", "0.5978946", "0.5965338", "0.59499013", "0.5907674", "0.5890695", "0.5883793", "0.58794004", "0.58497405", "0.5847242", "0.5798901", "0.57946473", "0.578739", "0.5772917", "0.5752029", "0.5740829", "0.57040435", "0.5699966", "0.5647884", "0.56323063", "0.56321156", "0.56296396", "0.5627802", "0.55779713", "0.5553732", "0.553606", "0.55194896", "0.5519196", "0.55131847", "0.5504139", "0.549346", "0.5488899", "0.5466632", "0.5444758", "0.5417287", "0.5406265", "0.5385116", "0.53814334", "0.5379359", "0.5365553", "0.5365366", "0.53500766", "0.534957", "0.53266704", "0.5322859", "0.5320275", "0.53172565", "0.5276391", "0.5270241", "0.5266827", "0.5259974", "0.525261", "0.52440786", "0.5233863", "0.52198714", "0.5215353", "0.52052534", "0.518912", "0.5179053", "0.51691025", "0.51643074", "0.5135302", "0.513464", "0.5121063", "0.512034", "0.511891", "0.51165646", "0.51100385", "0.5109094", "0.5107941", "0.5103107", "0.5098048", "0.5087897", "0.5079356", "0.50568956", "0.5055347", "0.50391304", "0.5034707", "0.50309557", "0.5021466", "0.50201726", "0.5011384", "0.5004892", "0.5003035", "0.49977502", "0.49933755", "0.49931774" ]
0.57462233
28
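The chunked-import-with-progress pattern in the document above (a percent-complete callback driven by a custom bulk indexer) can also be expressed with the stock elasticsearch-py helper; a minimal sketch, assuming elasticsearch-py is installed — the chunk size and argument names here are illustrative, not taken from the record:

from elasticsearch.helpers import streaming_bulk

def index_with_progress(es, actions, total_count, chunk=500):
    done = 0
    for ok, _ in streaming_bulk(es, actions, chunk_size=chunk, raise_on_error=False):
        done += 1
        if done % chunk == 0 or done == total_count:
            # mirrors the "\rstatus: N%" progress line used in the record above
            print(f"\rstatus: {100 * done // total_count}%", end="", flush=True)
    return done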
Preprocess and generate business data.
import json
from typing import Iterator

import utils


def gen_business_data(fp: str) -> Iterator[dict]:
    """Preprocess each business record and yield it as an Elasticsearch bulk action."""
    with open(fp, encoding='utf-8') as f:
        for line in f:
            data = json.loads(line)
            # split the comma-separated category string into a clean list
            if "categories" in data and data["categories"]:
                data["categories"] = [s.strip() for s in data["categories"].split(',')]
            utils.preprocess_raw_json(data)
            utils.flatten_business_attributes(data)
            doc = {
                "_index": "business",
                "_source": data
            }
            yield doc
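A minimal sketch of how a bulk-action generator like this is typically consumed, assuming the elasticsearch-py client — the host URL and input file name are placeholders, not part of the record:

from elasticsearch import Elasticsearch
from elasticsearch.helpers import bulk

es = Elasticsearch("http://localhost:9200")  # assumed local cluster
# bulk() drains the generator in chunks, so the whole file is indexed
# without ever being held in memory at once.
success, errors = bulk(es, gen_business_data("business.json"), raise_on_error=False)
print(f"indexed {success} documents, {len(errors)} failed actions")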
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def pre_process(self):\n pass", "def pre_process(self):\n pass", "def pre_process(self):\n pass", "def pre_process(self):\n pass", "def pre_process(self):\n pass", "def preprocess(self):\n pass", "def preprocess(self):\n pass", "def preprocess(self):\n pass", "def _build_preprocessing(self):\n\n # For now, do nothing\n pass", "def test_data_preprocessing(raw_data): \r\n\r\n # get data output\r\n data_output = raw_data[['Submitby Date Time', 'Challenge Manager', 'Challenge Copilot', 'Posting Date Date', 'Track',\r\n 'Technology List', 'First Place Prize', 'Num Registrations', 'Total Prize']]\r\n with open('cache/extended_columns.pkl', 'rb') as f:\r\n extended_columns = pickle.load(f)\r\n with open('cache/num_date_columns.pkl', 'rb') as f:\r\n max_date_columns = pickle.load(f)\r\n \r\n data_output = class_binaryzation_for_test(data_output, extended_columns)\r\n try:\r\n data_output = date_separation1(data_output, max_num_columns=NUM_DATE_COLUMNS)\r\n except:\r\n data_output = date_separation2(data_output)\r\n data_output = money_digitalization(data_output)\r\n data_output = get_date_in_days(data_output)\r\n data_output['Days from Posting to Submit'] = data_output['Submitby Date Time Days from 2016'] \\\r\n - data_output['Posting Date Date Days from 2016'] \r\n\r\n return data_output", "def preprocess(self):", "def preprocess_data(self):\n\n self._preprocess_train_data()\n self._preprocess_test_data()", "def preprocess(data):\n raise NotImplementedError", "def preprocess(self):\n raise RuntimeError(\"please implement this function!\")", "def _prepare_data(self):\n #TODO hardcoded values need to change\n print_info(\"Preprocessing the train data...\")\n self._place_dataset(os.path.join(self._hparams[\"temp-data\"], \"train\"),\n self.TRAIN_OUT_PATH)\n\n print_info(\"Preprocessing the test data...\")\n self._place_dataset(os.path.join(self._hparams[\"temp-data\"], \"test\"),\n self.TEST_OUT_PATH)\n\n print_info(\"Preprocessing the validation data...\")\n self._place_dataset(os.path.join(self._hparams[\"temp-data\"], \"val\"),\n self.VAL_OUT_PATH)", "def preProcess(self, datum):\n pass", "def preprocess_valid_data(self):\r\n print(\"* Preprocessing validation data.\", flush=True)\r\n prep.create_HDF_file(self.C.validation_set)\r\n\r\n self.print_time_elapsed()", "def _preprocess(self):\n self.data['sentences'] = self.data['text'].apply(self._tokenize_sent)\n self.data['nouns'] = self.data['sentences'].apply(self._get_nouns)\n # self._get_frequent_features()\n # self._compactness_pruning()\n # self._redundancy_pruning()\n # self._get_features()\n self._extract_opinions()", "def preprocess(self,data):\n preprocessObj = PreprocessData()\n preprocess_data = preprocessObj.preprocess(data)\n return preprocess_data", "def pre_process_data():\n data_list, header_list = Parser.__parse_csv_data(Parser.training_data_file)\n table = pandas.DataFrame(data_list, columns=header_list)\n table.drop(['date', 'employee id'], axis=1, inplace=True)\n unique_categories = table['category'].unique()\n unique_expense_desc = table['expense description'].unique()\n unique_tax_name = table['tax name'].unique()\n\n column_index = {\n 'input': {},\n 'output': {}\n }\n\n column_index['input']['pre-tax amount'] = {\n 'column_index': 0,\n 'type': 'int'\n }\n\n column_index['input']['tax amount'] = {\n 'column_index': 1,\n 'type': 'int'\n }\n\n index = 2\n\n for i in range(len(unique_expense_desc)):\n column_index['input'][unique_expense_desc[i]] = {\n 'column_index': i + index,\n 'type': 'str'\n }\n\n index += 
len(unique_expense_desc)\n\n for i in range(len(unique_tax_name)):\n column_index['input'][unique_tax_name[i]] = {\n 'column_index': i + index,\n 'type': 'str'\n }\n\n for i in range(len(unique_categories)):\n column_index['output'][unique_categories[i]] = {'value': i}\n\n Parser.__save_column_index(column_index)", "def prepare_data(self):", "def _pre_construct(self, data):\n logging.info(\"pre constructing (enter)\")\n self.ids = collections.defaultdict(set)\n self.collecting = True\n pre_construct_data = self.construct(data)\n self.collecting = False\n logging.info(\"pre constructing (exit)\")\n return pre_construct_data", "def preprocess_train_data(self):\r\n print(\"* Preprocessing training data.\", flush=True)\r\n prep.create_HDF_file(self.C.training_set, is_training_set=True)\r\n\r\n self.print_time_elapsed()", "def data_preprocessing():\n lineid_content = get_lineid_content()\n print('Read movie_lines.txt file complete...')\n convos = get_convos()\n print('Read movie_conversations.txt file complete...')\n print('Building dataset')\n get_data(lineid_content, convos)", "def pre_process(cls, *args, **kwargs):\n pass", "def pre_process(cls, *args, **kwargs):\n pass", "def pre_process(cls, *args, **kwargs):\n pass", "def pre_process(cls, *args, **kwargs):\n pass", "def preprocessing_pipeline(self):\n self.__multilabel_processing()\n self.__split_dataset()\n self.__save_datasets()", "def preprocess(self):\n\n print('[ INFO ]: Preprocessing abalone data...')\n\n # Rename headers of data frame\n abalone_data = pd.read_csv(self.abalone_path, header=None)\n abalone_data.columns = [\n 'sex','length','diameter','height','whole_weight','shucked_weight',\n 'viscera_weight','shell_weight','rings'\n ]\n categorical_features = [\n 'sex'\n ]\n continuous_features = [\n 'length','diameter','height','whole_weight','shucked_weight',\n 'viscera_weight','shell_weight'\n ]\n predictor = 'rings'\n\n df = alg.continuous_to_discrete(self, abalone_data, continuous_features)\n\n classes = abalone_data[predictor].unique().tolist()\n\n features = [df.columns[j] for j in range(len(df.columns)) if df.columns[j] != predictor]\n\n return df, features, predictor, classes", "def pre_processing_(data_df , serialized_objects):\n max_recency_acc_dig = serialized_objects['max_recency_acc_dig'] # These values are taken from trained model values\n max_recency_dig_2yr = serialized_objects['max_recency_dig_2yr'] # These values are taken from trained model values\n max_acc_recency_mf = serialized_objects['max_acc_recency_mf'] #These are values imported in training dataset. 
Same values needs to be used to impute missing values in unseen data\n\n data_df = data_df.na.fill({\n 'recency_acc_dig' : max_recency_acc_dig, # Filling missing values\n 'recency_dig_2yr' : max_recency_dig_2yr,\n 'acc_recency_mf' : max_acc_recency_mf\n })\n\n freq_acc_upg_2yrs_split = [-float('inf'), 0, 1, 2, float('inf')]\n bucketizer_freq_acc_upg_2yrs = Bucketizer(splits=freq_acc_upg_2yrs_split, inputCol='freq_acc_upg_acc_2yrs', outputCol='freq_acc_upg_acc_2yrs_bkt')\n data_df = bucketizer_freq_acc_upg_2yrs.setHandleInvalid('keep').transform(data_df) # Binning the freq_acc_upg_acc_2yrs column\n\n tot_purchase_split = [-float('inf'), 0, 1, 2, 3, float('inf')]\n bucketizer_tot_purchase = Bucketizer(splits=tot_purchase_split, inputCol='tot_accsry_purchse', outputCol='tot_accsry_purchse_bkt')\n data_df = bucketizer_tot_purchase.setHandleInvalid('keep').transform(data_df) # Binning the tot_accsry_purchse column\n\n del_cols_new = ['freq_acc_upg_acc_2yrs', 'tot_accsry_purchse']\n data_df = data_df.drop(*del_cols_new) # Dropping the older continuous columns\n return data_df", "def preprocess_test_data(self):\r\n print(\"* Preprocessing test data.\", flush=True)\r\n prep.create_HDF_file(self.C.test_set)\r\n\r\n self.print_time_elapsed()", "def _process_data(self):\r\n # Rename columns to match final feature class\r\n self._rename_columns()\r\n # Add point ID column\r\n self._add_pointid()\r\n # Sort rows by transect id and timestamp\r\n self._sort_rows()\r\n # Fill Null records with a value\r\n self._fill_nulls()\r\n # Set site_code to lower case\r\n self._lower_site_code()\r\n # Create survey_id\r\n self._calc_survey_id()\r\n # Calculate nativesg column if at least one of the veg columns is a Native seagrass type\r\n if set(self.veg_columns).intersection(set(NATIVESG_CODES)) > 0:\r\n self.nativesg_columns = list(set(self.veg_columns).intersection(set(NATIVESG_CODES)))\r\n self._calc_nativesg()\r\n #\r", "def _preprocess_training_model(self, data):\n def _pre_process(raw_data):\n \"\"\" Pre-process raw data. 
\"\"\"\n pattern = re.compile(\n r\"((?<=')\\w\\d.*?(?=')|(?<=\\\")\\w\\d.*?(?=\\\")|[\\w\\d]+)\")\n words = re.findall(pattern, raw_data)\n return ' '.join(list(map(string_utils.snake_case_to_camel, words)))\n\n data_list = []\n # Preprocess the dataset with naming convention, etc.\n with Progress() as progress:\n preprocess_task = progress.add_task('Pre-processing dataset...',\n total=data.shape[0])\n for idx, row in data.iterrows():\n row_data = {}\n for column in ['text', 'key', 'value']:\n row_data[column] = _pre_process(row[column])\n data_list.append(row_data)\n progress.update(preprocess_task, advance=1)\n return pd.DataFrame(data=data_list)", "def pre_process_data(self, all_labels, all_data):\n\n # [1] Normalizes data\n all_data = self.pre_precess_manager.normalization(all_data)\n\n data_train, data_test, label_train, label_test = train_test_split(all_data, all_labels, test_size=0.1,\n shuffle=True)\n\n return data_train, data_test, label_train, label_test", "def run_preprocessing(self, serie):\n pass", "def prepare_data():\n user_name = os.environ.get('USER')\n traintest_corpus = ResumeCorpus('/Users/' + user_name + '/Documents/Data')\n random.shuffle(traintest_corpus.resumes)\n\n for resume in traintest_corpus.resumes:\n try:\n review_text = pre_processing(resume[0])\n review_text = \" \".join(review_text)\n data_dict['data'].append(review_text)\n data_dict['label'].append(resume[1])\n except:\n pass", "def preprocess(self, data, label):\n\t\traise NotImplementedError", "def preprocess_main():", "def preproc_pipeline(data):\n # Preprocess\n data = preprocess(data)\n\n # Optional --> run a technical analysis on it and add more features\n data = generate_ta(data)\n \n # Split\n train_set, validation_set, test_set = train_val_test_split(data)\n \n # Set up for Keras\n train_set = shape_for_keras(train_set)\n validation_set = shape_for_keras(validation_set)\n test_set = shape_for_keras(test_set)\n\n # We could save this to csv.\n return train_set, validation_set, test_set", "def __prepro_testdata(dict_testdata):\n if not dict_testdata or check_df == False:\n test_corpus = list()\n # create connection to testdata and traindata input\n conn_test = connection_preparation.conn_testing()\n # load data for testdata\n dict_testdata = manage_dfs.get_df(conn_test)\n else:\n pass\n\n # PREPROCESSING TESTDATA\n for name, df in dict_testdata.items():\n logging.info(f'preprocessing for testdata table {name} started.')\n test_corpus = preprocessing.preprocess_data(df, step_key)\n dict_testdata_prepro[name] = test_corpus", "def preprocess(\n self, data: List[Dict[str, Any]]\n ) -> Generator[Dict[str, Any], None, None]:\n raise NotImplementedError", "def pre_process(self, frame: pd.DataFrame) -> pd.DataFrame:\n # frame = rows_filtering(frame)\n # frame = feature_dropping(frame)\n # frame = feature_values_fixing(frame)\n\n # frame = extreme_values_handling(frame, [])\n # missing_value_imputation(frame, [])\n\n # data_type_conversion(frame)\n # frame = feature_engineering(frame, self.GENERATE_USER_FEATURES)\n # feature_renaming(frame)\n\n return frame", "def preprocess(self, train_file, validation_file, test_file):\n chardict, labeldict = self.make_dictionary(train_file, validation_file, test_file)\n print 'preparing training data'\n training = self.parse_file(train_file, chardict, labeldict)\n \n print 'preparing validation data'\n validation = self.parse_file(validation_file, chardict, labeldict)\n\n print 'preparing test data'\n test = self.parse_file(test_file, chardict, labeldict)\n\n return 
Data(training, validation, test, chardict, labeldict)", "def preprocess(self, cfg_pipeline):\n return", "def data_preparation(self) -> None:\n self.logger.info('data cleaning')\n self.logger.info('num of secs: {}, num of ipo_dates: {}, num of secs with prices: {}'.format(\n len(self.data),\n len(self.ipo_dates),\n len(self.prices)\n ))\n excluded = []\n excluded = [i.lower() for i in excluded]\n self.logger.info(f'number of excluded: {len(excluded)}')\n for i in excluded:\n self.data.pop(i)\n for s in self.data:\n # columns with empty assets sum (empty columns and other situations)\n self.data[s].dropna(axis='columns', how='any', subset=['A_0'], inplace=True)\n # columns with descriptions (polish and english names of values)\n self.data[s].drop(self.data[s].columns[[0, 1]], inplace=True, axis=1)\n\n self.logger.info(f'number of secs after cleaning: {len(self.data)}')\n data_list = [k for k in self.data.values()]\n self.uber_data = pd.concat(data_list, ignore_index=True, axis=1)\n self.uber_data = self.uber_data.transpose()\n self.uber_data = self.uber_data.loc[:, pd.notnull(self.uber_data.columns)]", "def main():\n p = DataPreprocessor()\n p.preprocess_and_save_data(p.path_to_file)", "def _preprocess(self, data):\n\n # pipeline: first call the previous statistics:\n if self.previous_statistics is not None:\n data = self.previous_statistics.statistics(data)\n # the first of the statistics need to take list as input, in order to match the API. Then actually the\n # transformations work on np.arrays. In fact the first statistic transforms the list to array. Therefore, the\n # following code needs to be called only if the self statistic is the first, i.e. it does not have a\n # previous_statistic element.\n else:\n data = self._check_and_transform_input(data)\n\n return data", "def prepare_process(self, dataset):\n if dataset is not None:\n pass", "def preprocess(self, real, **kwargs):\n self._preprocessed = True", "def preprocess(self, X):\n X = X.copy()\n predictor_subset = self.predictor_subset.copy()\n if 'all' in predictor_subset:\n predictor_subset = add_all_predictors(predictor_subset, X.columns)\n \n use_temporal = 'temporal' in predictor_subset\n if use_temporal:\n X_temporal = get_temporal_predictors(\n X['TIMESTAMP_END']\n )\n predictor_subset.remove('temporal')\n\n X = X[predictor_subset]\n\n if use_temporal:\n X = pd.concat([X, X_temporal], axis=1)\n\n if 'WD' in predictor_subset:\n X = process_wind_direction_predictor(X)\n\n return X", "def preprocess(data, to_drop=[]):\n \n columns = data.columns.to_list()\n \n # split data to numeric vs categorical\n numeric_features = data.select_dtypes(include=[\n 'int64', 'float64']).columns\n \n if len(to_drop) > 0:\n categorical_features = data.select_dtypes(include=[\n 'object']).drop(to_drop, axis=1).columns\n print(categorical_features)\n else: \n categorical_features = data.select_dtypes(include=[\n 'object']).columns\n \n categorical_transformer = Pipeline(steps=[\n ('imputer', SimpleImputer(strategy='most_frequent', fill_value='missing'))])\n \n numerical_transformer = Pipeline(steps=[\n ('imputer', SimpleImputer(strategy='median')),\n ('scaler', RobustScaler())\n ])\n # missing_values = np.nan\n \n# Bundle preprocessing for numerical and categorical data\n preprocessor = ColumnTransformer(\n transformers=[\n ('num', numerical_transformer, numeric_features),\n ('cat', categorical_transformer, categorical_features)\n ])\n\n my_pipeline = Pipeline(steps=[('preprocessor', preprocessor) ])\n \n for col in to_drop:\n columns.remove(col)\n 
print('Hello')\n \n trans_data = my_pipeline.fit_transform(data)\n return trans_data#pd.DataFrame(#, columns=columns) ", "def pre_process(self, documents):\n\n return documents", "def preprocess(config: Config) -> None:\n print(colored(\"preprocessing:\", attrs=[\"bold\"]))\n factory = PreprocessingFactory()\n factory.process(config)", "def prepare_data(train, test):\n # change the name of the target column\n train.rename(columns={\"revenue\": \"target\"}, inplace=True)\n # map bool values to yes and no\n train[\"Weekend\"] = train[\"Weekend\"].map({True: \"Yes\", False: \"No\"})\n test[\"Weekend\"] = test[\"Weekend\"].map({True: \"Yes\", False: \"No\"})\n # set the id col as index\n train.set_index(\"id\", inplace=True)\n test.set_index(\"id\", inplace=True)\n\n # seperate the fetures and the target\n X_train = train.drop(\"target\", axis=1).copy()\n y_train = train[\"target\"].copy()\n X_test = test.copy()\n\n # select numerical and categorical columns\n num_cols = X_train.select_dtypes(exclude=\"object\").columns.tolist()\n cat_cols = X_train.select_dtypes(include=\"object\").columns.tolist()\n\n # numerical pipeline\n num_pipe = make_pipeline(SimpleImputer(strategy=\"mean\"))\n\n # categorical pipeline\n cat_pipe = make_pipeline(\n SimpleImputer(strategy=\"constant\", fill_value=\"NA\"),\n OneHotEncoder(handle_unknown=\"ignore\", sparse=False),\n )\n\n # full pipeline for data preprocessing\n full_pipe = ColumnTransformer(\n [(\"num\", num_pipe, num_cols), (\"cat\", cat_pipe, cat_cols)]\n )\n return X_train, y_train, X_test, full_pipe", "def process(self):\n self.extract()\n self.transform()\n self.load()", "def training_data_preprocessing(raw_data, num_passed_rows=72):\r\n # some samples have errors\r\n raw_data = raw_data[num_passed_rows:].reset_index(drop=True) \r\n \r\n # get data output\r\n data_output = raw_data[['Submitby Date Time', 'Challenge Manager', 'Challenge Copilot', 'Posting Date Date', 'Track',\r\n 'Technology List', 'First Place Prize', 'Num Registrations', 'Total Prize']]\r\n data_output, extended_columns = class_binaryzation(data_output)\r\n \r\n # save extended columns to cache\r\n extended_columns_filepath = 'cache/extended_columns.pkl'\r\n with open(extended_columns_filepath, 'wb') as f:\r\n pickle.dump(extended_columns, f)\r\n\r\n num_date_columns_filepath = 'cache/num_date_columns.pkl'\r\n try:\r\n data_output = date_separation1(data_output) \r\n with open(num_date_columns_filepath, 'wb') as f:\r\n pickle.dump(6, f)\r\n\r\n except:\r\n data_output = date_separation2(data_output)\r\n with open(num_date_columns_filepath, 'wb') as f:\r\n pickle.dump(5, f)\r\n\r\n data_output = money_digitalization(data_output)\r\n data_output = get_date_in_days(data_output)\r\n data_output['Days from Posting to Submit'] = data_output['Submitby Date Time Days from 2016'] \\\r\n - data_output['Posting Date Date Days from 2016'] \r\n \r\n # get other output\r\n label_output = pd.DataFrame(columns=['Success'])\r\n success_output = pd.DataFrame(columns=data_output.columns)\r\n failure_output = pd.DataFrame(columns=data_output.columns)\r\n for i in range(len(raw_data)):\r\n if raw_data.loc[i, 'Num Submissions Passed Review'] >= 1:\r\n label_output.loc[i, 'Success'] = 1\r\n success_output.loc[len(success_output)] = data_output.loc[i]\r\n else:\r\n label_output.loc[i, 'Success'] = 0\r\n failure_output.loc[len(failure_output)] = data_output.loc[i]\r\n\r\n return data_output, label_output, success_output, failure_output, extended_columns", "def _construct(self, data):\n 
logging.info(\"overall constructing (enter)\")\n\n pre_construct_data = self._pre_construct(data)\n # Kickstart the seralizin'.\n\n #if it found no ids, then we can just use the pre construct data\n if any((len(ids) > 0 for label, ids in self.ids.iteritems())):\n self.data = collections.defaultdict(dict)\n\n\n for manip in self.manips:\n manip()\n\n logging.debug(\"constructing (enter)\")\n # extend the output using the collated data we've found\n data = self.construct(data)\n logging.debug(\"constructing (exit)\")\n\n logging.debug(\"overall constructing (exit)\")\n return data\n else:\n logging.debug(\"overall constructing (exit)\")\n return pre_construct_data", "def preprocess(self):\n\n mm_magcoord.add_aacgm_coordinates(self)\n mm_magcoord.add_quasi_dipole_coordinates(self)\n mm_sc.calculate_ecef_velocity(self)\n mm_sc.add_ram_pointing_sc_attitude_vectors(self)\n\n return", "def preprocess(self, df):\n print(\"Started Processing....\")\n # binary conversion\n df.replace(to_replace=\"yes\", value=1, inplace=True)\n df.replace(to_replace=\"no\", value=0, inplace=True)\n\n # replace unknowns with nan\n df = df.replace(to_replace=\"unknown\", value=np.nan)\n # getting the list of columns with nan\n ml = df.columns[df.isna().any()].tolist()\n\n for item in ml:\n # getting the ratio of the index labels\n val = pd.DataFrame(df[item].value_counts(normalize=True))\n\n # index labels in a list\n valr = val.index.tolist()\n # drc.index = valr\n # columns values in a list\n valc = val[item].tolist()\n # replacing the nan values with ratio\n df[item] = df[item].fillna(pd.Series(np.random.choice(valr, p=valc, size=len(df))))\n\n # dependent variable\n dfy = df.iloc[:, -1]\n # independent variable\n dfx = df.iloc[:, :-1]\n\n # converting categorical data to numerical\n dfx = pd.get_dummies(dfx)\n\n # normalizing\n dfx = (dfx - dfx.min()) / (dfx.max() - dfx.min())\n\n dxdy = pd.concat([dfx, dfy], axis=1)\n\n # class balancing\n sm = RandomOverSampler(random_state=42)\n dfx, dfy = sm.fit_sample(dxdy.iloc[:, :-1], dxdy.iloc[:, -1])\n\n # converting to dataframe\n dfx = pd.DataFrame(dfx, columns=dxdy.iloc[:, :-1].columns.values)\n\n # dimensionality reduction\n pca = PCA(n_components=33)\n dfx = pca.fit_transform((dfx))\n\n print(\"Processing Done\")\n\n return dfx, dfy", "def _prepare(self):\n\n if os.path.isfile(\"DATA/diabetes/admission_type_id.csv\") == False:\n download_data()\n\n id_mapping = pd.read_csv(\"DATA/diabetes/admission_type_id.csv\", index_col = 0)\n data = pd.read_csv(\"DATA/diabetes/diabetic_data.csv\")\n\n # binarize admission type\n admdf = pd.DataFrame()\n for adtype, ad_id in zip(id_mapping.description, id_mapping.index):\n admdf[adtype] = (data.admission_type_id == ad_id)\n\n # binarize categorical text columns\n catdf = pd.DataFrame()\n dtype = data.race.dtype # grab datatype\n features = [\"race\", \"gender\", \"age\", \"diabetesMed\", \"insulin\", \"change\", \"readmitted\"]\n for feature in features:\n if data[feature].dtype == dtype:\n catdf = pd.concat([catdf, binarize(data[feature])], axis = 1)\n else:\n catdf = pd.concat([catdf, data[feature]], axis = 1)\n\n # choose non-binary columns\n nonbindf = data[[\"num_medications\", \"num_procedures\", \"num_lab_procedures\", \"number_outpatient\", \n \"number_emergency\", \"number_inpatient\", \"number_diagnoses\"]]\n\n self.data = pd.concat([catdf, admdf, nonbindf], axis = 1)", "def preprocess(data):\n # Data Preprocessing\n data['GDP_scaled']=preprocessing.scale(data['GDP'])\n 
data['CLPRB_scaled']=preprocessing.scale(data['CLPRB'])\n data['EMFDB_scaled']=preprocessing.scale(data['EMFDB'])\n data['ENPRP_scaled']=preprocessing.scale(data['ENPRP'])\n data['NGMPB_scaled']=preprocessing.scale(data['NGMPB'])\n data['PAPRB_scaled']=preprocessing.scale(data['PAPRB'])\n data['PCP_scaled']=preprocessing.scale(data['PCP'])\n data['ZNDX_scaled']=preprocessing.scale(data['ZNDX'])\n data['OP_scaled']=preprocessing.scale(data['Nominal Price'])\n data['OP2_scaled']=preprocessing.scale(data['Inflation Adjusted Price'])\n\n return data", "def prepare(self):\n if self.opts['verbose']:\n print(\"Preparing dataset (one-time operation)...\")\n # Create paths files and load them back in\n self._build_ID_sets()\n self._create_ID_files()\n self._load_ID_files()\n if self.opts['verbose']:\n print(\"... done with preparing the dataset.\")", "def pre_process_dataset(self):\n sentences = []\n idx = 1\n # Iterates of dataframe to collect sentences and labels\n for index, row in self.df.iterrows():\n # Normalizing and separate words of each sentence\n norm_sentence = self.norm_text(row['comment_text'])\n word_sentences = re.sub(\"[^\\w]\", \" \", norm_sentence).split()\n sentences.append(word_sentences)\n # Creating a word dictionary\n for word in word_sentences:\n if word not in self.word_2_idx:\n self.word_2_idx[word] = idx\n idx += 1\n # Getting all labels and creates a one-hot vector\n row_label = row[['toxic', 'severe_toxic', 'obscene', 'threat', 'insult', 'identity_hate']].values\n self.labels.append(row_label)\n\n # Collect word indexes from prepared word dictionary\n for words_sentence in sentences:\n self.input_data.append([self.word_2_idx[w] for w in words_sentence])", "def pre_process(self, dataset):\n\n # np.empty creates an empty array only. 
You have to replace this with your code.\n X = np.empty((0,0))\n y = np.empty((0))\n\n if dataset == 0:\n # Implement for the abalone dataset\n df = pd.DataFrame(columns=['sex', 'length', 'diameter', 'height', 'whole_weight', 'shucked_weight', 'viscera_weight', 'shell_weight', 'rings'])\n count = 0\n\n with open('Dataset.data') as file: # reading data from file\n data = file.read()\n\n data = data.split('\\n') # split data into different rows\n data = data[:-1] # last one is empty\n for row in data:\n row = row.split()\n df.loc[count] = row # add in dataframe\n count += 1\n\n df['M'] = np.where(df.sex=='M', 1,0) # genders are turned to a one hot encoding\n df['F'] = np.where(df.sex=='F', 1,0)\n df['I'] = np.where(df.sex=='I', 1,0)\n df = df.drop(['sex'], axis=1)\n df = df.dropna()\n\n df = df.sample(frac=1).reset_index(drop=True) # shuffle dataframe\n\n X = df.drop(['rings'], axis=1)\n X = X.values\n X = X.astype(float)\n y = df['rings'].values\n y = y.astype(float)\n\n elif dataset == 1:\n # Implement for the video game dataset\n df = pd.read_csv('VideoGameDataset - Video_Games_Sales_as_at_22_Dec_2016.csv') # read csv directly into a dataframe\n df1 = df[['Critic_Score', 'User_Score', 'Global_Sales']]\n df1 = df1.dropna()\n df1 = df1[df1.User_Score != 'tbd']\n\n df1 = df1.sample(frac=1).reset_index(drop=True) # shuffle rows\n\n X = df1.drop(['Global_Sales'], axis=1)\n X = X.values\n X = X.astype(float)\n y = df1['Global_Sales'].values\n y = y.astype(float)\n\n elif dataset == 2:\n # Implement for the banknote authentication dataset\n df = pd.DataFrame(columns=['variance', 'skewness', 'curtosis', 'entropy', 'class'])\n count = 0\n\n with open('data_banknote_authentication.txt') as file: # reading file \n data = file.read()\n data = data.split('\\n')\n data = data[:-1]\n for row in data:\n row = row.split(',')\n df.loc[count] = [float(elt) for elt in row[:-1]] + [int(row[-1])] # last column has class so it is int rest are float\n count += 1\n\n df = df.sample(frac=1).reset_index(drop=True) # shuffle dataset\n\n X = df.drop(['class'], axis=1)\n X = X.values\n y = df['class'].values\n y = y.astype(int)\n\n return X, y", "def _preprocess(self):\n # Size of each micro tree: B = 1/4 logn.\n self._block_size = int(1/4 * math.log2(self._size))\n\n # Build a list of ladders and a sparse table for the jump nodes.\n super()._preprocess()\n\n # Decompose the tree into macro tree and micro trees.\n self._micro_macro_decomposition()\n\n # Build simple tables for the micro trees.\n self._build_micro_tree_tables()", "def prepare_data(self, config: TreeConfigParser) -> None:\n self.data = Data(config)\n self.data.prepare_input()\n self.data.prepare_output()", "def _process(self):\n f = osp.join(self.processed_dir, 'pre_filter.pt')\n if osp.exists(f) and torch.load(f) != _repr(self.pre_filter):\n warnings.warn(\n \"The `pre_filter` argument differs from the one used in \"\n \"the pre-processed version of this dataset. 
If you want to \"\n \"make use of another pre-fitering technique, make sure to \"\n \"delete '{self.processed_dir}' first\")\n\n if files_exist(self.processed_paths): # pragma: no cover\n return\n\n if self.log and 'pytest' not in sys.modules:\n print('Processing...', file=sys.stderr)\n\n makedirs(self.processed_dir)\n self.process()\n\n path = osp.join(self.processed_dir, 'pre_filter.pt')\n torch.save(_repr(self.pre_filter), path)\n\n if self.log and 'pytest' not in sys.modules:\n print('Done!', file=sys.stderr)", "def _pre_fit(self):\n pass", "def preprocess():\n # Load the data\n random.seed(77)\n X,y = make_classification(n_samples=500, n_features=30, n_informative=8, n_redundant=2, \n n_repeated=0, n_classes=3, n_clusters_per_class=2, weights=None, \n flip_y=0.01, class_sep=1.0, hypercube=True, shift=0.0, scale=1.0, \n shuffle=True, random_state=None)\n\n x_train, x_val, y_train, y_val = train_test_split(X, y, random_state=0, test_size=0.25)\n\n # Standardize the data\n scaler = StandardScaler()\n X_train = scaler.fit_transform(x_train)\n X_val = scaler.transform(x_val)\n\n \n return X_train,y_train,X_val,y_val", "def _preprocess(self, data, normalize=False) -> np.ndarray:\n \n preprocessor = StandardScaler() if not normalize else Normalizer()\n\n data = preprocessor.fit_transform(data)\n \n return data", "def preprocess(\n self, data: dd.DataFrame, prep_fn: Callable = preprocessing_flow\n ) -> dd.DataFrame:\n self.data_divisions = data.divisions\n self.ts, series_ddf = distributed_preprocess(\n data, self.flow_config, self.client, prep_fn\n )\n return series_ddf", "def preprocess(self):\n\n print('[ INFO ]: Preprocessing forest fires data...')\n\n # Rename headers of data frame\n forestfires_data = pd.read_csv(self.forestfires_path, header=0)\n forestfires_data.columns = [\n 'x_axis','y_axis','month','day','ffmc','dmc','dc','isi','temp','rh',\n 'wind','rain','area'\n ]\n categorical_features = [\n 'month','day'\n ]\n predictor = 'area'\n\n df = alg.one_hot_encode(self, forestfires_data, categorical_features)\n\n features = [df.columns[j] for j in range(len(df.columns)) if df.columns[j] != predictor]\n\n return df, features, predictor", "def _preparation_workflow(self):\n self._validate_environment()\n self._validate_parameters()\n self._update_verbosity()", "def preprocess(data,scale):\n ##log_transformation\n #data['log_sale_price'] = np.log(data['sale_price'])\n #data['log_lot_area'] = np.log(data['lot_area'])\n #data['house_age'] = data['year_sold']- data['year_built']\n \n y = data['stay']\n \n #sales['log_sale_price'] = np.log(sales['sale_price'])\n #sales['log_lot_area'] = np.log(sales['lot_area'])\n #sales['house_age'] = sales['year_sold']- sales['year_built']\n data_dummy = data.copy()\n \n #dummy coding\n data_scale = pd.get_dummies(data_dummy).drop(columns = ['stay'])\n\n \n #scale the value\n if scale == True:\n S = StandardScaler().fit(data_scale)\n data_scale = S.transform(data_scale)\n \n return y, data_scale", "def preprocess_raw(self):\n pass", "def preprocess(self):\r\n file_name = os.path.join(self.raw_path, \"amazon-amazon-instant-video.json.gz\")\r\n print(f\"file_name: {file_name}\")\r\n if not os.path.exists(file_name):\r\n self.download()\r\n\r\n # parse json data\r\n data = self.get_data_frame_from_gzip_file(file_name)\r\n\r\n # rename columns\r\n data = data.rename(\r\n columns={\r\n \"reviewerID\": DEFAULT_USER_COL,\r\n \"asin\": DEFAULT_ITEM_COL,\r\n \"overall\": DEFAULT_RATING_COL,\r\n \"unixReviewTime\": DEFAULT_TIMESTAMP_COL,\r\n }\r\n )\r\n\r\n # select 
necessary columns\r\n data = pd.DataFrame(\r\n data,\r\n columns=[\r\n DEFAULT_USER_COL,\r\n DEFAULT_ITEM_COL,\r\n DEFAULT_RATING_COL,\r\n DEFAULT_TIMESTAMP_COL,\r\n ],\r\n )\r\n\r\n self.save_dataframe_as_npz(\r\n data,\r\n os.path.join(self.processed_path, f\"{self.dataset_name}_interaction.npz\"),\r\n )", "def\t_preprocessor(self) :\n\t\tlogging.debug('Beginning preprocessor')\n\t\t\n\t\t# Parse entries from ss class\n\t\tself._parse_initsol()\n\t\tself._parse_modelspace()\n\t\tself._parse_initbound()\n\t\t\n\t\t# Set regressors according to exptype\n\t\tself._set_regressors()\n\n\t\t# Deal with equations\n\t\tself.equations = self.ss.equations\n\n\t\t# Deal with noisy data ??", "def preprocessing(self, preprocessing):\n\n self._preprocessing = preprocessing", "def prepare_train_data(self):\r\n ## Impute rlkpis\r\n print(\"Imputing rlKPI df\")\r\n self.rlkpi.add_target_labels(1)\r\n self.rlkpi.impute_rl_kpis()\r\n\r\n print(\"Add 'met-real-station_no' & met-forecast-station_no to rl_kpis_df\")\r\n self.add_met_real_forecast_station_col_to_rlkpis()\r\n print(\"Merge 'met-real-sampled df to rl kps \")\r\n self.merge_met_real_sampled_df_to_rlkpis()\r\n\r\n ## Imputations for met-forecast\r\n print(\"Impute met-forecast\")\r\n met_forecast_obj = self.metfcast\r\n met_forecast_obj.impute_met_forecast()\r\n\r\n #Merge met forecast data to earlier merged data\r\n print(\"Merge Train data with imputed forecast df\")\r\n self.train_data = pd.merge(self.train_data,\r\n met_forecast_obj.imputed_forecast_df,\r\n on=['datetime-station_no'], indicator=True, how='inner')\r\n print(\"Check any imputation needed\", self.train_data.isna().sum().sum())\r\n self.train_data.drop(['_merge'], axis=1, inplace=True)\r\n self.perform_data_under_sampling(self.train_data)", "def preprocessing(name_file):\n\n db_data = pd.read_csv(name_file).dropna()\n db_data['Timestamp'] = pd.to_datetime(db_data['Timestamp'], unit='s')\n db_data = db_data[db_data['Timestamp'].dt.year >= 2017]\n db_data.reset_index(inplace=True, drop=True)\n db_data = db_data.drop(['Timestamp'], axis=1)\n db_data = db_data[0::60]\n\n n = len(db_data)\n\n # Split data\n train = db_data[0:int(n * 0.7)]\n validation = db_data[int(n * 0.7):int(n * 0.9)]\n test = db_data[int(n * 0.9):]\n\n # Normalize data\n train_mean = train.mean()\n train_std = train.std()\n train = (train - train_mean) / train_std\n validation = (validation - train_mean) / train_std\n test = (test - train_mean) / train_std\n\n return train, validation, test", "def _preprocess(self) -> None:\n self.cache = Cache(self._db, self._collection)", "def pre_process(db):\n conn = sqlite3.connect(db)\n data = pd.read_sql_query(\"Select Delta_T, V1, V2, V3, V4, V5, V6, V7, V8, V9, V10, V11, V12, V13, V14, V15, V16, V17, V18, V19, V20, V21, V22, V23, V24, V25, V26, V27, V28, Amount , Class from transactions;\", conn)\n train_split = int(0.8*len(data))\n train = data[0:train_split]\n test = data[train_split:len(data)]\n train_x = train.loc[:, ['Delta_T', 'V1', 'V2', 'V3', 'V4', 'V5', 'V6', 'V7', 'V8', 'V9', 'V10', 'V11', 'V12', 'V13', 'V14', 'V15', 'V16', 'V17', 'V18', 'V19', 'V20', 'V21', 'V22', 'V23', 'V24', 'V25', 'V26', 'V27', 'V28', 'Amount']]\n train_y = train.loc[:, ['Class']]\n test_x = test.loc[:, ['Delta_T', 'V1', 'V2', 'V3', 'V4', 'V5', 'V6', 'V7', 'V8', 'V9', 'V10', 'V11', 'V12', 'V13', 'V14', 'V15', 'V16', 'V17', 'V18', 'V19', 'V20', 'V21', 'V22', 'V23', 'V24', 'V25', 'V26', 'V27', 'V28', 'Amount']]\n test_y = test.loc[:, ['Class']]\n train_x = train_x.to_numpy()\n train_y = 
np.squeeze(train_y.to_numpy(),axis=1)\n test_x = test_x.to_numpy()\n test_y = np.squeeze(test_y.to_numpy(),axis=1)\n return train_x,train_y,test_x,test_y", "def process(self):\n\t\tif self.update_check() or self.force_update:\n\t\t\tself.district_check() #pull all local data and regions\n\t\t\tself.fix() #fix data anomalies - e.g add in Bucks.\n\t\t\tself.save_all() #store a copy of the data\n\t\t\tself.ingest() #add data to models\n\t\t\tself.update_totals() #calculate weekly data\n\t\telse:\n\t\t\tlog.info('PHE cases up to date')", "def build_data_set(self):\n if not self.assert_data_correct():\n self.download_all_data()\n self.unpack_rename_data()\n self.split_data_characters()\n self.clean_data_fragments()\n self.create_font_data()\n if not self.assert_train_augmented():\n self.augment_train_data()\n if not self.assert_style_data_correct():\n self.download_style_data()\n self.unpack_rename_data()", "def prepare_data(self):\r\n annual_df = self.annual_df\r\n coef_df = self.coef_df\r\n quarter_df = self.quarter_df\r\n # historical_df = self.historical_df\r\n Event_Buffer = self.Event_Buffer\r\n\r\n Tot_Prod = coef_df[\"Product\"].nunique()\r\n # Tot_Week = coef_df[\"wk\"].nunique()\r\n Tot_Week = 52\r\n\r\n EDLP_Events = list(annual_df[\"RP_Events\"])\r\n Min_EDLP_Events = [\r\n i - Event_Buffer if i - Event_Buffer >= 0 else 0 for i in EDLP_Events\r\n ]\r\n Max_EDLP_Events = [\r\n i + Event_Buffer if i + Event_Buffer < Tot_Week + 1 else Tot_Week\r\n for i in EDLP_Events\r\n ]\r\n\r\n TPR_Events = list(annual_df[\"TPR_Events\"])\r\n Min_TPR_Events = [\r\n i - Event_Buffer if i - Event_Buffer >= 0 else 0 for i in TPR_Events\r\n ]\r\n Max_TPR_Events = [\r\n i + Event_Buffer if i + Event_Buffer < Tot_Week + 1 else Tot_Week\r\n for i in TPR_Events\r\n ]\r\n\r\n Target_EDLP_Spend = [i for i in annual_df[\"PPG_RP_Spend\"]]\r\n Target_TPR_Spend = [i for i in annual_df[\"PPG_TPR_Spend\"]]\r\n Target_Trade_Spend = [i for i in annual_df[\"PPG_Total_Spend\"]]\r\n\r\n Mapping = {}\r\n Prod_Ind = coef_df[\"Product\"][0:Tot_Prod]\r\n for i, j in zip(Prod_Ind.index, Prod_Ind.values):\r\n Mapping[j] = i\r\n Mapping_reverse = {i: j for j, i in Mapping.items()}\r\n\r\n constants = [i for i in coef_df[\"constant\"]]\r\n\r\n Cat_Coef = coef_df[\"Catalogue\"][0:Tot_Prod]\r\n\r\n Disp_Coef = coef_df[\"Display\"][0:Tot_Prod]\r\n\r\n Base_Price_stg1 = [i for i in quarter_df[\"Final_baseprice\"]]\r\n Intercepts_stg1 = []\r\n for pr in range(Tot_Prod):\r\n Intercepts_stg1.append(\r\n np.mean([constants[j * Tot_Prod + pr] for j in range(0, Tot_Week)])\r\n )\r\n\r\n Base_Price_stg2 = [[i] * Tot_Week for i in quarter_df[\"Final_baseprice\"]]\r\n Intercepts_stg2 = [\r\n constants[j : j + Tot_Prod] for j in range(0, len(constants), Tot_Prod)\r\n ] # noqa\r\n\r\n EDLP_Coef = np.array(\r\n coef_df[[i for i in coef_df.columns if i.count(\"Retailer_Regular\") == 1]]\r\n )\r\n TPR_Coef = np.array(\r\n coef_df[[i for i in coef_df.columns if i.count(\"Retailer_Promoted\") == 1]]\r\n )\r\n\r\n # ################################ Available EDLP Interactions pairs ##############################\r\n\r\n EDLP = [\r\n re.findall(r\"[0-9]+\", i)\r\n for i in coef_df.columns\r\n if i.count(\"Retailer_Regular\") > 1\r\n ]\r\n EDLP_Interactions = []\r\n for i in EDLP:\r\n temp = []\r\n for j in i:\r\n temp.append(int(j))\r\n EDLP_Interactions.append(temp)\r\n\r\n # ###################################### Available TPR Interactions pairs #########################\r\n\r\n TPR = [\r\n re.findall(r\"[0-9]+\", i)\r\n for i in 
coef_df.columns\r\n if i.count(\"Retailer_Promoted\") > 1\r\n ]\r\n TPR_Interactions = []\r\n for i in TPR:\r\n temp = []\r\n for j in i:\r\n temp.append(int(j))\r\n TPR_Interactions.append(temp)\r\n\r\n # ###################################### EDLP_Interaction_Coef_Values ############################\r\n\r\n EDLP_Int_Coef_Values = {}\r\n for col in coef_df.columns:\r\n if col.count(\"Retailer_Regular\") > 1:\r\n Pair_name = \"_\".join([str(int(i)) for i in re.findall(r\"[0-9]+\", col)])\r\n EDLP_Int_Coef_Values[Pair_name] = list(coef_df[col])\r\n\r\n # ###################################### TPR_Interaction_Coef_Values #############################\r\n\r\n TPR_Int_Coef_Values = {}\r\n for col in coef_df.columns:\r\n if col.count(\"Retailer_Promoted\") > 1:\r\n Pair_name = \"_\".join([str(int(i)) for i in re.findall(r\"[0-9]+\", col)])\r\n TPR_Int_Coef_Values[Pair_name] = list(coef_df[col])\r\n\r\n # ##################################### Loading Pantry Loading Coefficients #######################\r\n\r\n Pantry_1 = list(coef_df[\"Pantry_Loading_1\"])\r\n Pantry_1 = [\r\n Pantry_1[j : j + Tot_Prod] for j in range(0, len(Pantry_1), Tot_Prod)\r\n ]\r\n Pantry_2 = list(coef_df[\"Pantry_Loading_2\"])\r\n Pantry_2 = [\r\n Pantry_2[j : j + Tot_Prod] for j in range(0, len(Pantry_2), Tot_Prod)\r\n ]\r\n\r\n # TE_Coeff = np.array(Promo_df[[\"TE_Promo\",\"TE_NoPromo\"]])\r\n self.Tot_Prod = Tot_Prod\r\n self.Tot_Week = Tot_Week\r\n self.EDLP_Events = EDLP_Events\r\n self.Min_EDLP_Events = Min_EDLP_Events\r\n self.Max_EDLP_Events = Max_EDLP_Events\r\n self.TPR_Events = TPR_Events\r\n self.Min_TPR_Events = Min_TPR_Events\r\n self.Max_TPR_Events = Max_TPR_Events\r\n\r\n self.Target_EDLP_Spend = Target_EDLP_Spend\r\n self.Target_TPR_Spend = Target_TPR_Spend\r\n self.Target_Trade_Spend = Target_Trade_Spend\r\n self.Mapping = Mapping\r\n self.Mapping_reverse = Mapping_reverse\r\n self.constants = constants\r\n self.EDLP_Coef = EDLP_Coef\r\n self.TPR_Coef = TPR_Coef\r\n\r\n self.EDLP_Interactions = EDLP_Interactions\r\n self.TPR_Interactions = TPR_Interactions\r\n self.EDLP_Int_Coef_Values = EDLP_Int_Coef_Values\r\n self.TPR_Int_Coef_Values = TPR_Int_Coef_Values\r\n self.Pantry_1 = Pantry_1\r\n self.Pantry_2 = Pantry_2\r\n\r\n self.Base_Price_stg1 = Base_Price_stg1\r\n self.Intercepts_stg1 = Intercepts_stg1\r\n self.Base_Price_stg2 = Base_Price_stg2\r\n self.Intercepts_stg2 = Intercepts_stg2\r\n\r\n self.Cat_Coef = Cat_Coef\r\n self.Disp_Coef = Disp_Coef", "def pre_pipeline(self, results):\n results[\"img_prefix\"] = self.img_prefix\n results[\"seg_prefix\"] = self.seg_prefix\n results[\"proposal_file\"] = self.proposal_file\n results[\"bbox_fields\"] = []\n results[\"mask_fields\"] = []\n results[\"seg_fields\"] = []\n results[\"site_fields\"] = []\n results[\"label_fields\"] = []", "def prepare(self):\n self.parse_template()\n self.build_argparser()\n self.parse_arguments()\n self.render_template()\n self.update_relation()", "def _prepare(self, setup):\n # Initialise cell\n self.cell = self.celltype(model=self._model)\n for rec in setup.record_variables:\n self.cell.record(*rec)\n if 'injected_currents' in setup.conditions:\n for loc, current in setup.conditions['injected_currents'].items():\n getattr(self.cell, loc).inject_current(current)\n if 'voltage_clamps' in setup.conditions:\n for loc, voltages in setup.conditions['voltage_clamps'].items():\n getattr(self.cell, loc).voltage_clamp(voltages)\n if 'synaptic_spikes' in setup.conditions:\n for loc, syn, spkes in 
setup.conditions['synaptic_spikes'].items():\n getattr(self.cell, loc).synaptic_stimulation(spkes, syn)", "def preprocess(self):\n self.word_to_id, self.unk_word_list = self.build_vocab(mode=\"word\")\n self.word_vocab_size = len(self.word_to_id)\n self.max_word_len = self.get_max_word_length(self.word_to_id)\n # Do not write the same file again\n if not os.path.exists(self.words_vocab_file):\n with open(self.words_vocab_file, 'wb') as f:\n pickle.dump((self.word_to_id, self.unk_word_list), f)\n if self.unit != \"word\":\n self.preprocess_sub_units()", "def prepare_data(self):\n import subprocess\n # Download coco data set into dir specified by config then /data/coco\n subprocess.call([f\"{get_original_cwd()}/bin/fetch_dataset.sh\", f\"{self.dir}/data/coco\", f\"{get_original_cwd()}\"])\n # subprocess.call([f\"bin/fetch_dataset.sh\", f\"{self.dir}/data/coco\"])\n task = \"instances\" if self.instance else \"person_keypoints\"\n register_coco_instances(\"train\", {}, f\"{self.dir}/data/coco/{task}_train2014.json\",\n f\"{self.dir}/data/coco/train2014\")\n register_coco_instances(\"val\", {}, f\"{self.dir}/data/coco/{task}_minival2014.json\",\n f\"{self.dir}/data/coco/val2014\")\n register_coco_instances(\"test\", {}, f\"{self.dir}/data/coco/{task}_valminusminival2014.json\",\n f\"{self.dir}/data/coco/val2014\")", "def preprocess(item):\n item = feature_engineering(item)\n item = encode_features(item)\n return item", "def prepare_training_data(\n self, dir_snippy: Path, dir_ont: Path, caller: str = 'clair',\n break_complex: bool = True, snippy_ext: str = \".ref.vcf\"\n ):\n\n self.training_dir.mkdir(parents=True, exist_ok=True)\n\n comparisons = self.get_coverage_comparisons(dir_snippy=dir_snippy, dir_ont=dir_ont, snippy_ext=snippy_ext)\n\n ont_with_truth, snippies, _ = self.get_data_from_comparisons(\n comparisons=comparisons, caller=caller, break_complex=break_complex, outdir=self.training_dir\n )\n\n features, _ = self.parse_features(ont_calls=ont_with_truth)\n\n # Combined features for training\n self.features_combined = pd.concat(features) # combined feature frames\n self.features_combined = self.features_combined.reset_index(drop=True)\n self.features_combined.to_csv(self.training_dir / 'training_features.tsv', sep='\\t', index=False)", "def prep_data(dataset_config=None):\n \n\n data_struct = DataStruct(dataset_config)\n\n data_name = data_struct.name\n datasets_dir = data_struct.data_path\n out_dir = data_struct.save_path\n\n # If dataset already downloaded an unpacked, do nothing\n if os.path.isdir(out_dir):\n print('{} already downloaded, unpacked and processed.'.format(data_name))\n return\n\n # Check if download is required\n data_url = data_struct.url\n compressed_file_name = downloader(datasets_dir, data_url)\n\n # Unpack compressed dataset file\n unpacker(compressed_file_name, out_dir)\n\n # Custom preprocessing steps for data sets\n custom_preprocessor(out_dir)", "def _preprocess(self):\n\n self.df = self.df[(self.df['days_b_screening_arrest'] <= 30)\n & (self.df['days_b_screening_arrest'] >= -30)\n & (self.df['is_recid'] != -1)\n & (self.df['c_charge_degree'] != 'O')\n & (self.df['score_text'] != 'N/A')]\n\n self.df['c_jail_out'] = pd.to_datetime(self.df['c_jail_out'])\n self.df['c_jail_in'] = pd.to_datetime(self.df['c_jail_in'])\n self.df['length_of_stay'] = (self.df['c_jail_out']\n - self.df['c_jail_in'])\n\n self.df['score_factor'] = np.where(self.df['score_text']\n != 'Low',\n 'HighScore', 'LowScore')\n self.df['y_pred'] = (self.df['score_factor'] == 'HighScore')", "def 
modelarts_pre_process():\n config.file_name = os.path.join(config.output_path, config.file_name)", "def prep_data():\n loader = DLoader()\n cap = loader.visitor_cnt\n\n pass", "def preprocess(self):\n \n file_name_list = os.listdir(self.image_dir)\n random.seed(1234)\n random.shuffle(file_name_list)\n \n for i,d in enumerate(self.domains):\n self.attr2idx[d]=i \n\n for i, file_name in enumerate(file_name_list):\n if (file_name.startswith('X_')):\n continue\n \n parts = file_name.split(\"-\")\n label = int(parts[0])\n if label not in self.domains:\n continue\n img_name = file_name\n\n count=self.get_sample_count(label)\n if count<self.valid_set_size:\n # create holdout set on the fly\n utils.copy_file(self.image_dir,self.valid_set_dir,img_name)\n else:\n self.dataset.append([img_name, self.attr2idx[label]])\n \n self.increment_sample_count(label)\n\n print(\"Sample count per domain: \"+str(self.sample_count)+\" (including holdout set, holdout size per domain is: \"+str(self.valid_set_size)+\")\")\n print('Finished preprocessing the dataset...')", "def setup(self):\n\n module = [m for m in Rt.modules if m.name == self.module_name][0]\n\n # Take CPACS file from the optimisation\n cpacs_path = module.cpacs_in\n tixi = open_tixi(cpacs_path)\n self.Model = load_surrogate(tixi)\n tixi.save(cpacs_path)\n\n df = self.Model.df\n df.set_index(\"Name\", inplace=True)\n for name in df.index:\n if df.loc[name, \"type\"] == \"obj\":\n self.add_output(name)\n elif df.loc[name, \"type\"] == \"des\":\n self.add_input(name)\n\n self.xd = df.loc[[name for name in df.index if df.loc[name, \"type\"] == \"des\"]]\n self.yd = df.loc[[name for name in df.index if df.loc[name, \"type\"] == \"obj\"]]", "def _dataframe_preprocess(self):\n # 1. add baisc feature like date, time in day, ....\n if self.data_type != 'porto':\n self.df['TIMESTAMP'] = self.df.apply(lambda df: df['TIMESTAMPS'][0], axis=1)\n self.df['TIME'] = pd.to_datetime(self.df['TIMESTAMP'], unit='s', utc=True)\n \n self.df.TIME = self.df.TIME.dt.tz_convert(self.timezone)\n # 2. 
group df for specific driver analysis\n self.grouped_df = self.df.groupby('LABEL')\n if self.count_od_info:\n if 'SD' not in self.df.columns:\n self._add_OD_info()\n self.grouped_od = self.df.groupby('SD')", "def _process_data(self, db, extent, processing):\n if extent in processing:\n return\n processing.append(extent)\n # Get the data we need to process, and short-circuit if no\n # data is specified.\n data = []\n data_attr = self._data_attr\n if hasattr(extent.EntityClass, data_attr):\n data = getattr(extent.EntityClass, data_attr)\n if callable(data):\n data = data(db)\n if not data:\n return\n # Get the field spec from the extent's create transaction\n # by instantiating a new create transaction.\n create = extent.t.create\n tx = create()\n dict_field_spec = tx._field_spec.copy()\n tuple_field_spec = tx._field_spec.copy()\n # For tuples and dicts, remove fields that don't even exist in\n # the create transaction.\n # For tuples, remove readonly fields since we can't set them,\n # and remove hidden fields since we can't \"see\" them.\n for name in dict_field_spec.keys():\n delete = False\n if not hasattr(tx.f, name):\n # The create transaction's _setup() might delete a\n # field without deleting the field_spec entry.\n delete = True\n else:\n field = getattr(tx.f, name)\n if delete:\n del dict_field_spec[name]\n if delete or field.readonly or field.hidden:\n del tuple_field_spec[name]\n for FieldClass in dict_field_spec.itervalues():\n if issubclass(FieldClass, Entity):\n allow = FieldClass.allow\n for extent_name in allow:\n parent_extent = db.extent(extent_name)\n self._process_data(db, parent_extent, processing)\n # Process the data.\n execute = db.execute\n dict_field_names = dict_field_spec.keys()\n for values in data:\n # Convert values to dict if it's a tuple.\n if isinstance(values, tuple):\n new_values = {}\n for field_name, value in zip(tuple_field_spec.iterkeys(),\n values):\n new_values[field_name] = value\n values = new_values\n # Resolve the dict's values if needed.\n value_map = {}\n for field_name, FieldClass in dict_field_spec.iteritems():\n value = values.get(field_name, DEFAULT)\n if value is not DEFAULT:\n try:\n value = resolve(db, field_name, value, FieldClass,\n dict_field_names)\n except:\n print '-' * 40\n print ' extent:', extent\n print ' data:', data\n print ' values:', values\n print ' while resolving:', value\n raise\n value_map[field_name] = value\n # Assign values in field definition order, so that\n # interactions with field value-changed handlers is\n # deterministic.\n new = create()\n for field_name in dict_field_names:\n if field_name in value_map:\n value = value_map[field_name]\n field = new.f[field_name]\n if (field.readonly\n or getattr(new, field_name) == value\n ):\n # Skip readonly and unchanged fields.\n continue\n setattr(new, field_name, value)\n try:\n execute(new)\n except:\n print '-' * 40\n print ' extent:', extent\n print ' data:', data\n print ' values:', values\n print ' dict_field_spec:', dict_field_spec\n print ' tuple_field_spec:', tuple_field_spec\n print ' value_map:', value_map\n raise" ]
[ "0.7036312", "0.7036312", "0.7036312", "0.7036312", "0.7036312", "0.7025339", "0.7025339", "0.7025339", "0.6963976", "0.6953828", "0.6916517", "0.6750624", "0.661842", "0.66013443", "0.65898615", "0.65819675", "0.65424716", "0.6525521", "0.65110165", "0.64360577", "0.63551754", "0.6324146", "0.63195324", "0.63042873", "0.6269313", "0.6269313", "0.6269313", "0.6269313", "0.62298423", "0.6200814", "0.61185575", "0.6105866", "0.60946095", "0.60826725", "0.6045414", "0.60399437", "0.6039292", "0.60370153", "0.6033026", "0.6032108", "0.6007231", "0.59953374", "0.5987671", "0.5984806", "0.5984667", "0.5943694", "0.59159887", "0.59025675", "0.58886224", "0.58712447", "0.58496815", "0.5848562", "0.5840869", "0.5803971", "0.5803444", "0.57928723", "0.57917535", "0.57825625", "0.57652706", "0.57549137", "0.57522243", "0.5751931", "0.5741659", "0.5734848", "0.57316023", "0.5714322", "0.5706465", "0.5702442", "0.5700541", "0.5698477", "0.5684244", "0.56771564", "0.56695", "0.5664899", "0.5644054", "0.5634592", "0.5634147", "0.56334203", "0.5633314", "0.5632494", "0.56320226", "0.5628526", "0.5621725", "0.5614875", "0.5604109", "0.55926", "0.5589602", "0.5587086", "0.5578938", "0.5572392", "0.55647564", "0.5561484", "0.55588967", "0.55564916", "0.5555014", "0.5552614", "0.5551504", "0.5539279", "0.5538284", "0.5534399", "0.5529532" ]
0.0
-1
Preprocess and generate review data.
import json
from typing import Iterator

def gen_review_data(fp: str) -> Iterator[dict]:
    with open(fp, encoding='utf-8') as f:
        for line in f:
            data = json.loads(line)
            # `utils` is a project-local module; preprocess_raw_json cleans the raw record in place.
            utils.preprocess_raw_json(data)
            doc = {
                "_index": "review",
                "_source": data
            }
            yield doc
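The yielded dicts follow the action format consumed by the Elasticsearch bulk helper, which the "_index"/"_source" keys suggest is the intended sink. A minimal usage sketch, assuming a local Elasticsearch instance and a hypothetical reviews.json input file (neither the host URL nor the file name comes from the source):

    # Stream the generated actions into Elasticsearch in one bulk call.
    # "http://localhost:9200" and "reviews.json" are assumptions for illustration.
    from elasticsearch import Elasticsearch
    from elasticsearch.helpers import bulk

    es = Elasticsearch("http://localhost:9200")
    success, errors = bulk(es, gen_review_data("reviews.json"))
    print(f"indexed {success} review documents, {len(errors)} errors")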
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def prepare_data():\n user_name = os.environ.get('USER')\n traintest_corpus = ResumeCorpus('/Users/' + user_name + '/Documents/Data')\n random.shuffle(traintest_corpus.resumes)\n\n for resume in traintest_corpus.resumes:\n try:\n review_text = pre_processing(resume[0])\n review_text = \" \".join(review_text)\n data_dict['data'].append(review_text)\n data_dict['label'].append(resume[1])\n except:\n pass", "def preprocess(self):\r\n file_name = os.path.join(self.raw_path, \"amazon-amazon-instant-video.json.gz\")\r\n print(f\"file_name: {file_name}\")\r\n if not os.path.exists(file_name):\r\n self.download()\r\n\r\n # parse json data\r\n data = self.get_data_frame_from_gzip_file(file_name)\r\n\r\n # rename columns\r\n data = data.rename(\r\n columns={\r\n \"reviewerID\": DEFAULT_USER_COL,\r\n \"asin\": DEFAULT_ITEM_COL,\r\n \"overall\": DEFAULT_RATING_COL,\r\n \"unixReviewTime\": DEFAULT_TIMESTAMP_COL,\r\n }\r\n )\r\n\r\n # select necessary columns\r\n data = pd.DataFrame(\r\n data,\r\n columns=[\r\n DEFAULT_USER_COL,\r\n DEFAULT_ITEM_COL,\r\n DEFAULT_RATING_COL,\r\n DEFAULT_TIMESTAMP_COL,\r\n ],\r\n )\r\n\r\n self.save_dataframe_as_npz(\r\n data,\r\n os.path.join(self.processed_path, f\"{self.dataset_name}_interaction.npz\"),\r\n )", "def _preprocess(self):\n self.data['sentences'] = self.data['text'].apply(self._tokenize_sent)\n self.data['nouns'] = self.data['sentences'].apply(self._get_nouns)\n # self._get_frequent_features()\n # self._compactness_pruning()\n # self._redundancy_pruning()\n # self._get_features()\n self._extract_opinions()", "def data_preprocessing():\n lineid_content = get_lineid_content()\n print('Read movie_lines.txt file complete...')\n convos = get_convos()\n print('Read movie_conversations.txt file complete...')\n print('Building dataset')\n get_data(lineid_content, convos)", "def prepare_review_data():\n with open(REVIEW_FILE, 'r') as fread:\n reviews = fread.read()\n with open(LABEL_FILE, 'r') as fread:\n labels = fread.read()\n return reviews, labels", "def _build_preprocessing(self):\n\n # For now, do nothing\n pass", "def preprocess_data(self):\n\n self._preprocess_train_data()\n self._preprocess_test_data()", "def preprocess(self):\n pass", "def preprocess(self):\n pass", "def preprocess(self):\n pass", "def preprocess(self):", "def _process_reviews(self):\n params = {'add_special_tokens': True, 'max_length': self._max_len,\n 'return_token_type_ids': False, 'pad_to_max_length': True,\n 'return_attention_mask': True, 'return_tensors': 'pt'}\n\n self._tokens = self._review.apply(self._tokenizer.encode_plus, **params)", "def preprocess_train_data(self):\r\n print(\"* Preprocessing training data.\", flush=True)\r\n prep.create_HDF_file(self.C.training_set, is_training_set=True)\r\n\r\n self.print_time_elapsed()", "def preprocess(review):\n review=review.lower()\n \n review = re.sub(r'[^\\w\\s]', \"\", review)\n collection=[]\n for x in review.split(' '):\n if x not in stop_words:\n collection.append(x)\n #processed_review=' '.join(x for x in collection)\n processed_review=collection\n return processed_review", "def prepare_data(raw=False, round_ratings=False):\n print('Downloading input data...')\n if raw:\n # read data\n review = get_input(\n 'https://www.dropbox.com/s/mtln9b6udoydn2h/yelp_academic \\\n _dataset_review_sample.csv?dl=1')\n user = get_input(\n 'https://www.dropbox.com/s/pngrptljotqm4ds/yelp_academic \\\n _dataset_user.json?dl=1')\n business = get_input(\n 'https://www.dropbox.com/s/w0wy854u5swrhmc/yelp_academic \\\n 
_dataset_business.json?dl=1')\n\n # join datasets\n review_user = pd.merge(\n review, user, on=\"user_id\",\n how=\"left\", suffixes=(\"\", \"_user\"))\n review_user_business = pd.merge(\n review_user, business, on=\"business_id\",\n how=\"left\", suffixes=(\"\", \"_business\"))\n review_user_business = review_user_business[[\n 'user_id', 'business_id',\n 'stars', 'text',\n 'name', 'average_stars',\n 'name_business', 'stars_business',\n 'categories', 'state', 'city']]\n else:\n review_user_business = get_input(\n 'https://www.dropbox.com/s/sj445d95lljuc4p/small_sample.parquet?dl=1'\n )\n if round_ratings:\n # bucketize numeric features to reduce dimensions\n review_user_business['average_stars'] = review_user_business[\n 'average_stars'].apply(lambda x: round_of_rating(x))\n review_user_business['stars_business'] = review_user_business[\n 'stars_business'].apply(lambda x: round_of_rating(x))\n return review_user_business", "def preprocess_valid_data(self):\r\n print(\"* Preprocessing validation data.\", flush=True)\r\n prep.create_HDF_file(self.C.validation_set)\r\n\r\n self.print_time_elapsed()", "def _preprocess_training_model(self, data):\n def _pre_process(raw_data):\n \"\"\" Pre-process raw data. \"\"\"\n pattern = re.compile(\n r\"((?<=')\\w\\d.*?(?=')|(?<=\\\")\\w\\d.*?(?=\\\")|[\\w\\d]+)\")\n words = re.findall(pattern, raw_data)\n return ' '.join(list(map(string_utils.snake_case_to_camel, words)))\n\n data_list = []\n # Preprocess the dataset with naming convention, etc.\n with Progress() as progress:\n preprocess_task = progress.add_task('Pre-processing dataset...',\n total=data.shape[0])\n for idx, row in data.iterrows():\n row_data = {}\n for column in ['text', 'key', 'value']:\n row_data[column] = _pre_process(row[column])\n data_list.append(row_data)\n progress.update(preprocess_task, advance=1)\n return pd.DataFrame(data=data_list)", "def preprocessing_fn(inputs):\n review = inputs[const.REVIEW_COLUMN]\n review_tokens = tft.map(lambda x: tf.string_split(x, delimiters), review)\n review_indices = tft.string_to_int(review_tokens, top_k=vocab_size)\n # Add one for the oov bucket created by string_to_int.\n review_weight = tft.tfidf_weights(review_indices, vocab_size + 1)\n\n output = {\n const.REVIEW_COLUMN: review_indices,\n const.REVIEW_WEIGHT: review_weight,\n const.LABEL_COLUMN: inputs[const.LABEL_COLUMN]\n }\n return output", "def test_data_preprocessing(raw_data): \r\n\r\n # get data output\r\n data_output = raw_data[['Submitby Date Time', 'Challenge Manager', 'Challenge Copilot', 'Posting Date Date', 'Track',\r\n 'Technology List', 'First Place Prize', 'Num Registrations', 'Total Prize']]\r\n with open('cache/extended_columns.pkl', 'rb') as f:\r\n extended_columns = pickle.load(f)\r\n with open('cache/num_date_columns.pkl', 'rb') as f:\r\n max_date_columns = pickle.load(f)\r\n \r\n data_output = class_binaryzation_for_test(data_output, extended_columns)\r\n try:\r\n data_output = date_separation1(data_output, max_num_columns=NUM_DATE_COLUMNS)\r\n except:\r\n data_output = date_separation2(data_output)\r\n data_output = money_digitalization(data_output)\r\n data_output = get_date_in_days(data_output)\r\n data_output['Days from Posting to Submit'] = data_output['Submitby Date Time Days from 2016'] \\\r\n - data_output['Posting Date Date Days from 2016'] \r\n\r\n return data_output", "def preprocess():\n\n # Read Training and Test Data\n train_df = pd.read_csv(configs.TRAIN_DATA_PATH)\n test_df = pd.read_csv(configs.TEST_DATA_PATH)\n\n # Data Exploration and Preprocessing\n 
user_ids = train_df['userId']\n movie_ids = train_df['movieId']\n ratings = train_df['rating']\n\n # Create the mapping and save it for later usage in testing\n all_users = create_mapping(pd.concat([train_df['userId'], test_df['userId']], axis = 0))\n all_movies = create_mapping(pd.concat([train_df['movieId'], test_df['movieId']], axis = 0))\n\n\n # Save the mapping arrays for users and movies\n with open(configs.USER_IDS_PATH, \"wb\") as f:\n np.save(f, all_users)\n\n with open(configs.MOVIE_IDS_PATH, \"wb\") as f:\n np.save(f, all_movies)\n\n\n # Resetting the ids of training data to [0-n]\n user_ids = map_ids(user_ids, users = True)\n movie_ids = map_ids(movie_ids, users = False)\n\n\n # Resetting the ids of test data\n test_user_ids = map_ids(test_df['userId'], users = True)\n test_movie_ids = map_ids(test_df['movieId'], users = False)\n test_ratings = test_df['rating']\n\n # Statistics of training data\n # Number of users and movies can be extracted from the array of mapped ids\n n_users = np.max(user_ids) + 1\n n_movies = np.max(movie_ids) + 1\n\n # Statistics of test data\n test_n_users = np.max(test_user_ids) + 1\n test_n_movies = np.max(test_movie_ids) + 1\n\n # Returning the indices back can be done using unmap_ids function\n # Example\n # unmap_ids(movie_ids, users = False)\n # unmap_ids(user_ids, users = True)\n\n # Define the training rating matrix as sparse matrix\n\n # Sparse rating matrix from training data\n R = sparse.coo_matrix(\n (ratings, (user_ids, movie_ids)),\n shape=(n_users, n_movies),\n dtype=np.float\n )\n\n\n\n # Save the rating matrix for training data\n sparse.save_npz(configs.R_TRAIN_MATRIX_PATH, R)\n\n # Sparse rating matrix from test data\n R2 = sparse.coo_matrix(\n (test_ratings, (test_user_ids, test_movie_ids)),\n shape=(test_n_users, test_n_movies),\n dtype=np.float\n )\n\n # Save the rating matrix for test data\n sparse.save_npz(configs.R_TEST_MATRIX_PATH, R2)\n\n # return user_ids, movie_ids, ratings, n_users, n_movies, R, R2\n return R, R2", "def pre_process_food_review_data():\n each_food_review_document = {}\n with open('foods.txt') as f:\n for each_line in f:\n try:\n each_line = each_line.strip()\n if each_line:\n if each_line.startswith('product/productId'):\n if each_food_review_document:\n pre_processed_food_review_data.append(each_food_review_document)\n\n each_food_review_document = {}\n product_info = each_line.split(':')\n each_food_review_document[product_info[0]] = product_info[1]\n else:\n food_review_info = each_line.split(':')\n each_food_review_document[food_review_info[0]] = food_review_info[1]\n except Exception:\n print each_line", "def single_review_prep(text):\n clean_test = data_cleaner(text)\n dummy_dict= {'star': [clean_test]}\n clean_test_df = pd.DataFrame(dummy_dict)\n return clean_test_df", "def preprocess(data):\n raise NotImplementedError", "def preprocess(self,data):\n preprocessObj = PreprocessData()\n preprocess_data = preprocessObj.preprocess(data)\n return preprocess_data", "def preprocess_test_data(self):\r\n print(\"* Preprocessing test data.\", flush=True)\r\n prep.create_HDF_file(self.C.test_set)\r\n\r\n self.print_time_elapsed()", "def main():\n p = DataPreprocessor()\n p.preprocess_and_save_data(p.path_to_file)", "def pre_process(self):\n pass", "def pre_process(self):\n pass", "def pre_process(self):\n pass", "def pre_process(self):\n pass", "def pre_process(self):\n pass", "def _prepare_data(self):\n #TODO hardcoded values need to change\n print_info(\"Preprocessing the train data...\")\n 
self._place_dataset(os.path.join(self._hparams[\"temp-data\"], \"train\"),\n self.TRAIN_OUT_PATH)\n\n print_info(\"Preprocessing the test data...\")\n self._place_dataset(os.path.join(self._hparams[\"temp-data\"], \"test\"),\n self.TEST_OUT_PATH)\n\n print_info(\"Preprocessing the validation data...\")\n self._place_dataset(os.path.join(self._hparams[\"temp-data\"], \"val\"),\n self.VAL_OUT_PATH)", "def pre_process_data():\n data_list, header_list = Parser.__parse_csv_data(Parser.training_data_file)\n table = pandas.DataFrame(data_list, columns=header_list)\n table.drop(['date', 'employee id'], axis=1, inplace=True)\n unique_categories = table['category'].unique()\n unique_expense_desc = table['expense description'].unique()\n unique_tax_name = table['tax name'].unique()\n\n column_index = {\n 'input': {},\n 'output': {}\n }\n\n column_index['input']['pre-tax amount'] = {\n 'column_index': 0,\n 'type': 'int'\n }\n\n column_index['input']['tax amount'] = {\n 'column_index': 1,\n 'type': 'int'\n }\n\n index = 2\n\n for i in range(len(unique_expense_desc)):\n column_index['input'][unique_expense_desc[i]] = {\n 'column_index': i + index,\n 'type': 'str'\n }\n\n index += len(unique_expense_desc)\n\n for i in range(len(unique_tax_name)):\n column_index['input'][unique_tax_name[i]] = {\n 'column_index': i + index,\n 'type': 'str'\n }\n\n for i in range(len(unique_categories)):\n column_index['output'][unique_categories[i]] = {'value': i}\n\n Parser.__save_column_index(column_index)", "def training_data_preprocessing(raw_data, num_passed_rows=72):\r\n # some samples have errors\r\n raw_data = raw_data[num_passed_rows:].reset_index(drop=True) \r\n \r\n # get data output\r\n data_output = raw_data[['Submitby Date Time', 'Challenge Manager', 'Challenge Copilot', 'Posting Date Date', 'Track',\r\n 'Technology List', 'First Place Prize', 'Num Registrations', 'Total Prize']]\r\n data_output, extended_columns = class_binaryzation(data_output)\r\n \r\n # save extended columns to cache\r\n extended_columns_filepath = 'cache/extended_columns.pkl'\r\n with open(extended_columns_filepath, 'wb') as f:\r\n pickle.dump(extended_columns, f)\r\n\r\n num_date_columns_filepath = 'cache/num_date_columns.pkl'\r\n try:\r\n data_output = date_separation1(data_output) \r\n with open(num_date_columns_filepath, 'wb') as f:\r\n pickle.dump(6, f)\r\n\r\n except:\r\n data_output = date_separation2(data_output)\r\n with open(num_date_columns_filepath, 'wb') as f:\r\n pickle.dump(5, f)\r\n\r\n data_output = money_digitalization(data_output)\r\n data_output = get_date_in_days(data_output)\r\n data_output['Days from Posting to Submit'] = data_output['Submitby Date Time Days from 2016'] \\\r\n - data_output['Posting Date Date Days from 2016'] \r\n \r\n # get other output\r\n label_output = pd.DataFrame(columns=['Success'])\r\n success_output = pd.DataFrame(columns=data_output.columns)\r\n failure_output = pd.DataFrame(columns=data_output.columns)\r\n for i in range(len(raw_data)):\r\n if raw_data.loc[i, 'Num Submissions Passed Review'] >= 1:\r\n label_output.loc[i, 'Success'] = 1\r\n success_output.loc[len(success_output)] = data_output.loc[i]\r\n else:\r\n label_output.loc[i, 'Success'] = 0\r\n failure_output.loc[len(failure_output)] = data_output.loc[i]\r\n\r\n return data_output, label_output, success_output, failure_output, extended_columns", "def preprocess(self, data_f):\n \n return self.vec.transform(data_f.review)", "def preprocess(dataset):\n preped_dataset = {}\n preped_dataset['c'] = preprocess_set(dataset['c'])\n 
preped_dataset['r'] = preprocess_set(dataset['r'])\n preped_dataset['y'] = dataset['y']\n return preped_dataset", "def preprocess_main():", "def __prepro_testdata(dict_testdata):\n if not dict_testdata or check_df == False:\n test_corpus = list()\n # create connection to testdata and traindata input\n conn_test = connection_preparation.conn_testing()\n # load data for testdata\n dict_testdata = manage_dfs.get_df(conn_test)\n else:\n pass\n\n # PREPROCESSING TESTDATA\n for name, df in dict_testdata.items():\n logging.info(f'preprocessing for testdata table {name} started.')\n test_corpus = preprocessing.preprocess_data(df, step_key)\n dict_testdata_prepro[name] = test_corpus", "def inference_preprocess(self):\n return", "def pre_process_reviews(csv, outputname):\n df = pd.read_csv(csv)\n df = df.drop(\"Unnamed: 0\", axis='columns')\n df.to_csv(outputname, index=False)", "def preprocess(self, train_file, validation_file, test_file):\n chardict, labeldict = self.make_dictionary(train_file, validation_file, test_file)\n print 'preparing training data'\n training = self.parse_file(train_file, chardict, labeldict)\n \n print 'preparing validation data'\n validation = self.parse_file(validation_file, chardict, labeldict)\n\n print 'preparing test data'\n test = self.parse_file(test_file, chardict, labeldict)\n\n return Data(training, validation, test, chardict, labeldict)", "def predict(self, review):\n raise NotImplementedError", "def preproc_pipeline(data):\n # Preprocess\n data = preprocess(data)\n\n # Optional --> run a technical analysis on it and add more features\n data = generate_ta(data)\n \n # Split\n train_set, validation_set, test_set = train_val_test_split(data)\n \n # Set up for Keras\n train_set = shape_for_keras(train_set)\n validation_set = shape_for_keras(validation_set)\n test_set = shape_for_keras(test_set)\n\n # We could save this to csv.\n return train_set, validation_set, test_set", "def run_preprocessor():\n\n prepro = PP()\n df = prepro.load_data()\n\n df.subject_ch = df.subject_ch.map(lambda x: convert_emoticons(str(x)))\n\n df = prepro.remove_html_tags()\n\n\n df = remove_unwanted_columns(df)\n df[\"weeks\"] = df['created_ch'].dt.week\n\n # remove characteres\n df = prepro.normalization()\n\n # run tokenizer\n df = prepro.tokenizations()\n\n # remove characteres\n df = prepro.stop_word_remover()\n\n # remove characteres\n df = prepro.stemming_lemmatization()\n\n return df\n\n\n\n\n # def replace_week_numbers(df):\n # \"\"\"\n # functiion that change week number from 1 to 19\n # \"\"\"\n \n # return sorted(df.weeks.unique())\n\n # df[\"weeks_num\"] = df[\"weeks\"].map(lambda x: replace_week_numbers(df).index(x)+1 if(x in replace_week_numbers(df)) else np.nan)", "def pre_process_dataset(self):\n sentences = []\n idx = 1\n # Iterates of dataframe to collect sentences and labels\n for index, row in self.df.iterrows():\n # Normalizing and separate words of each sentence\n norm_sentence = self.norm_text(row['comment_text'])\n word_sentences = re.sub(\"[^\\w]\", \" \", norm_sentence).split()\n sentences.append(word_sentences)\n # Creating a word dictionary\n for word in word_sentences:\n if word not in self.word_2_idx:\n self.word_2_idx[word] = idx\n idx += 1\n # Getting all labels and creates a one-hot vector\n row_label = row[['toxic', 'severe_toxic', 'obscene', 'threat', 'insult', 'identity_hate']].values\n self.labels.append(row_label)\n\n # Collect word indexes from prepared word dictionary\n for words_sentence in sentences:\n self.input_data.append([self.word_2_idx[w] 
for w in words_sentence])", "def preproc_doc(document):\n\n # Each document is a list of lines\n tokenizer = tokenization.FullTokenizer(\n vocab_file=FLAGS.vocab_file, do_lower_case=FLAGS.do_lower_case)\n\n # set a random seed for reproducability\n # since this function is run in parallel, if we hardcode a seed, all\n # documents will have the same permutations. Instead we use the hash of the\n # first sentence as the seed so it is different for each document and it\n # is still reproducible.\n hash_object = hashlib.md5(document[0])\n rng = random.Random(int(hash_object.hexdigest(), 16) % (10**8))\n\n # Each document is composed of a list of sentences. We create paragraphs\n # by keeping together sentences on the same line and adding adjacent sentences\n # if there are fewer than 5 to form the paragraph.\n # The utility functions below expect the document to be split by paragraphs.\n list_of_paragraphs = []\n paragraph = []\n for line in document:\n line = tokenization.convert_to_unicode(line)\n line = line.replace(u\"\\u2018\", \"'\").replace(u\"\\u2019\", \"'\")\n sents = split_line_by_sentences(line)\n for sent in sents:\n tokens = tokenizer.tokenize(sent)\n if tokens:\n paragraph.append(tokens)\n if len(paragraph) > 5:\n list_of_paragraphs.append(paragraph)\n paragraph = []\n\n # In case of any empty paragraphs, remove them.\n list_of_paragraphs = [x for x in list_of_paragraphs if x]\n\n # Convert the list of paragraphs into TrainingInstance object\n # See preprocessing_utils.py for definition\n if FLAGS.format == FORMAT_BINARY:\n instances = create_instances_from_document(list_of_paragraphs,\n FLAGS.max_seq_length, rng)\n elif FLAGS.format == FORMAT_PARAGRAPH:\n instances = create_paragraph_order_from_document(list_of_paragraphs,\n FLAGS.max_seq_length, rng)\n\n # Convert token lists into ids and add any needed tokens and padding for BERT\n tf_examples = [\n convert_instance_to_tf_example(tokenizer, instance,\n FLAGS.max_seq_length)[0]\n for instance in instances\n ]\n\n # Serialize TFExample for writing to file.\n tf_examples = [example.SerializeToString() for example in tf_examples]\n\n return tf_examples", "def prepare_student_data(self) -> dict:\n self._filename_pre_data()\n empty_student = {}\n empty_student[\"scoreTimestamp\"] = \"N/A\"\n for i in self.draft_out:\n empty_student[i] = \"N/A\"\n for i in self.pre_data:\n empty_student[i] = self.pre_data[i]\n self.pre_data = empty_student", "def preprocess():\n # Load the data\n random.seed(77)\n X,y = make_classification(n_samples=500, n_features=30, n_informative=8, n_redundant=2, \n n_repeated=0, n_classes=3, n_clusters_per_class=2, weights=None, \n flip_y=0.01, class_sep=1.0, hypercube=True, shift=0.0, scale=1.0, \n shuffle=True, random_state=None)\n\n x_train, x_val, y_train, y_val = train_test_split(X, y, random_state=0, test_size=0.25)\n\n # Standardize the data\n scaler = StandardScaler()\n X_train = scaler.fit_transform(x_train)\n X_val = scaler.transform(x_val)\n\n \n return X_train,y_train,X_val,y_val", "def main(input_filepath, output_filepath):\n logger = logging.getLogger(__name__)\n logger.info('making final data set from raw data')\n\n conn = sqlite3.connect('../raw/td_V2.db')\n git_commits = pd.read_sql_query(\"SELECT * FROM GIT_COMMITS\",conn)\n szz_fault_inducing_commits = pd.read_sql_query(\"SELECT * FROM szz_fault_inducing_commits\",conn)\n refactoring_miner = pd.read_sql_query(\"SELECT * FROM refactoring_miner\",conn)\n refactoring_miner = 
refactoring_miner[refactoring_miner[\"COMMIT_HASH\"].isin(git_commits[\"COMMIT_HASH\"])]\n git_commits_changes = pd.read_sql_query(\"SELECT * FROM GIT_COMMITS_CHANGES\", conn)\n git_commits_changes = git_commits_changes[git_commits_changes[\"COMMIT_HASH\"].isin(refactoring_miner[\"COMMIT_HASH\"])]\n\n preprocess(git_commits, szz_fault_inducing_commits, refactoring_miner, git_commits_changes)", "def _load_data(self):\n self.mapper = Mapper()\n self.mapper.generate_vocabulary(self.review_summary_file)\n self.X_fwd, self.X_bwd, self.Y = self.mapper.get_tensor(reverseflag=True)\n # Store all the mapper values in a dict for later recovery\n self.mapper_dict = dict()\n self.mapper_dict['seq_length'] = self.mapper.get_seq_length()\n self.mapper_dict['vocab_size'] = self.mapper.get_vocabulary_size()\n self.mapper_dict['rev_map'] = self.mapper.get_reverse_map()\n # Split into test and train data\n self._split_train_tst()", "def preprocess(args, dataset, process_splits=('train', 'dev', 'test'), print_aggregated_stats=False, verbose=False):\n text_tokenize, program_tokenize, post_process, table_utils = tok.get_tokenizers(args)\n parsed_programs = load_parsed_sqls(args, augment_with_wikisql=args.augment_with_wikisql)\n num_parsed_programs = len(parsed_programs)\n\n vocabs = load_vocabs(args)\n\n schema_graphs = dataset['schema']\n schema_graphs.lexicalize_graphs(\n tokenize=text_tokenize, normalized=(args.model_id in [VASE, SQLOVA, RATSQL]))\n\n # 32 dbs, 119 table pairs contain ambiguities\n # num_ambs = 0\n # amb_dbs = set()\n # for db_name in schema_graphs.db_index:\n # schema_graph = schema_graphs[db_name]\n # for key in schema_graph.foreign_key_index:\n # if len(schema_graph.foreign_key_index[key]) > 1:\n # print(schema_graph.get_table(key[0]).name, schema_graph.get_table(key[1]).name)\n # for i, (f1, f2) in enumerate(schema_graph.foreign_key_index[key]):\n # print('Key pair {}: {}, {}'.format(i, schema_graph.get_field(f1).name,\n # schema_graph.get_field(f2).name))\n # amb_dbs.add(schema_graph.base_name)\n # num_ambs += 1\n # print('{} foreign key ambiguities'.format(num_ambs))\n # print('Foreign key ambiguity detected in {} databases'.format(len(amb_dbs)))\n # import pdb\n # pdb.set_trace()\n\n ############################\n # data statistics\n num_oov = 0\n num_examples = 0\n num_denormalization_failed = 0\n num_schema_truncated = 0\n num_picklist_matched = []\n max_ptr_span_size = 0\n num_text_tokens, num_input_tokens, num_cm_tokens, num_cm_wf_tokens = [], [], [], []\n ############################\n\n # parallel data\n for split in process_splits:\n if not split in dataset:\n print(f\"{split} split not in dataset...\")\n continue\n stats = preprocess_split(dataset, split, args, parsed_programs,\n text_tokenize, program_tokenize, post_process, table_utils,\n schema_graphs, vocabs, verbose=verbose)\n ############################\n # update data statistics\n num_oov_split = stats[0]\n num_denormalization_failed_split = stats[1]\n num_schema_truncated_split = stats[2]\n num_picklist_matched_split = stats[3]\n max_ptr_span_size_split = stats[4]\n num_text_tokens_split, num_input_tokens_split, num_cm_tokens_split, num_cm_wf_tokens_split = stats[5:]\n num_oov += num_oov_split\n num_examples += len(dataset[split])\n num_denormalization_failed += num_denormalization_failed_split\n num_schema_truncated += num_schema_truncated_split\n num_picklist_matched += num_picklist_matched_split\n if max_ptr_span_size_split > max_ptr_span_size:\n max_ptr_span_size = max_ptr_span_size_split\n num_text_tokens += 
num_text_tokens_split\n num_input_tokens += num_input_tokens_split\n num_cm_tokens += num_cm_tokens_split\n num_cm_wf_tokens += num_cm_wf_tokens_split\n ############################\n\n # if len(parsed_programs) > num_parsed_programs:\n # save_parsed_sqls(args, parsed_programs)\n\n #FORCE SAVE THE PARSES\n parsed_json = os.path.join(args.data_dir, '{}.parsed.json'.format(args.dataset_name))\n if not os.path.exists(parsed_json):\n print(f\"* save the parsed sqls !!! \")\n save_parsed_sqls(args, parsed_programs)\n\n\n if print_aggregated_stats:\n print_data_statistics(num_oov, num_examples, num_denormalization_failed, num_schema_truncated,\n max_ptr_span_size, num_text_tokens, num_input_tokens, num_cm_tokens, num_cm_wf_tokens)\n\n out_pkl = get_processed_data_path(args)\n with open(out_pkl, 'wb') as o_f:\n pickle.dump(dataset, o_f)\n print('Processed data dumped to {}'.format(out_pkl))", "def analyse(self, reviews):\n if self._analyser is None:\n raise ModelNotTrained\n self.__clean_vars()\n bodies = self.__extract_review_body(reviews)\n predictions = self._analyser.predict(bodies)\n predictions = list(map(self.__normalize_prediction, predictions))\n self.__calculate_word_polarity(reviews, predictions)\n self.__calculate_global_karma(predictions)", "def preProcess(self, datum):\n pass", "def _pre_construct(self, data):\n logging.info(\"pre constructing (enter)\")\n self.ids = collections.defaultdict(set)\n self.collecting = True\n pre_construct_data = self.construct(data)\n self.collecting = False\n logging.info(\"pre constructing (exit)\")\n return pre_construct_data", "def pre_process(self, dataset):\n\n # np.empty creates an empty array only. You have to replace this with your code.\n X = np.empty((0,0))\n y = np.empty((0))\n\n if dataset == 0:\n # Implement for the abalone dataset\n df = pd.DataFrame(columns=['sex', 'length', 'diameter', 'height', 'whole_weight', 'shucked_weight', 'viscera_weight', 'shell_weight', 'rings'])\n count = 0\n\n with open('Dataset.data') as file: # reading data from file\n data = file.read()\n\n data = data.split('\\n') # split data into different rows\n data = data[:-1] # last one is empty\n for row in data:\n row = row.split()\n df.loc[count] = row # add in dataframe\n count += 1\n\n df['M'] = np.where(df.sex=='M', 1,0) # genders are turned to a one hot encoding\n df['F'] = np.where(df.sex=='F', 1,0)\n df['I'] = np.where(df.sex=='I', 1,0)\n df = df.drop(['sex'], axis=1)\n df = df.dropna()\n\n df = df.sample(frac=1).reset_index(drop=True) # shuffle dataframe\n\n X = df.drop(['rings'], axis=1)\n X = X.values\n X = X.astype(float)\n y = df['rings'].values\n y = y.astype(float)\n\n elif dataset == 1:\n # Implement for the video game dataset\n df = pd.read_csv('VideoGameDataset - Video_Games_Sales_as_at_22_Dec_2016.csv') # read csv directly into a dataframe\n df1 = df[['Critic_Score', 'User_Score', 'Global_Sales']]\n df1 = df1.dropna()\n df1 = df1[df1.User_Score != 'tbd']\n\n df1 = df1.sample(frac=1).reset_index(drop=True) # shuffle rows\n\n X = df1.drop(['Global_Sales'], axis=1)\n X = X.values\n X = X.astype(float)\n y = df1['Global_Sales'].values\n y = y.astype(float)\n\n elif dataset == 2:\n # Implement for the banknote authentication dataset\n df = pd.DataFrame(columns=['variance', 'skewness', 'curtosis', 'entropy', 'class'])\n count = 0\n\n with open('data_banknote_authentication.txt') as file: # reading file \n data = file.read()\n data = data.split('\\n')\n data = data[:-1]\n for row in data:\n row = row.split(',')\n df.loc[count] = [float(elt) for elt in 
row[:-1]] + [int(row[-1])] # last column has class so it is int rest are float\n count += 1\n\n df = df.sample(frac=1).reset_index(drop=True) # shuffle dataset\n\n X = df.drop(['class'], axis=1)\n X = X.values\n y = df['class'].values\n y = y.astype(int)\n\n return X, y", "def download_and_preprocess(self):\n print('Preparing steering angle database.')\n print('Downloading...')\n self.download()\n print('Preprocessing...')\n self.preprocess()", "def preprocess(self):\n raise RuntimeError(\"please implement this function!\")", "def preprocessing(text, tokenization=0, rm_stopwords=0, numbers_to_text=0, to_tfidf=0):\n\ttrain_data = pd.DataFrame(columns=['text', 'response'])\n\n\tprep_0 = [strip_non_alphanum(line) for line in text]\n\tprep_1 = [line for line in prep_0 if line.rstrip()]\n\tprep_2 = [strip_multiple_whitespaces(line) for line in prep_1]\n\tprep_3 = [line.lower() for line in prep_2]\n\n\tif to_tfidf == 1:\n\t\t#when using tf_idf, removes single character words given that they are ignored by sklearn's TfidfVectorizer\n\t\tprep_3 = [' '.join([word for word in line.split() if len(word) > 1]) for line in prep_3]\n\n\tif tokenization == 1:\n\t\tprep_3 = [line.split(' ') for line in prep_3]\n\t\t#removes whitespaces from the list\n\t\tprep_3 = [list(filter(None, line)) for line in prep_3]\n\telse:\n\t\tprep_3 = [line[:-1] if line[-1] == \" \" else line for line in prep_3]\n\n\tif numbers_to_text == 1 and tokenization == 1:\n\t\t#convert all numbers to integers and convert these numbers to its written form\n\t\ttemp_prep = []\n\t\tfor sentence in prep_3:\n\t\t\ttemporary_sentence = []\n\t\t\tfor word in sentence:\n\t\t\t\tif str(word).isdigit():\n\t\t\t\t\tconverted_words = num2words(int(word), to='cardinal', lang='pt').split(' ')\n\t\t\t\t\tif to_tfidf == 1 and rm_stopwords == 0:\n\t\t\t\t\t\tconverted_words = [word for word in converted_words if word != 'e']\n\t\t\t\t\ttemporary_sentence.extend(converted_words)\n\t\t\t\telse:\n\t\t\t\t\ttemporary_sentence.append(word)\n\t\t\ttemp_prep.append(temporary_sentence)\n\n\t\tprep_3 = temp_prep\n\telif numbers_to_text == 1 and tokenization == 0:\n\t\t#convert all numbers to integers and convert these numbers to its written form\n\t\ttemp_prep = []\n\t\tfor sentence in prep_3:\n\t\t\ttemporary_sentence = []\n\t\t\tfor word in sentence.split(' '):\n\t\t\t\tif str(word).isdigit():\n\t\t\t\t\tconverted_words = num2words(int(word), to='cardinal', lang='pt').split(' ')\n\t\t\t\t\tif to_tfidf == 1 and rm_stopwords == 0:\n\t\t\t\t\t\tconverted_words = [word for word in converted_words if word != 'e']\n\t\t\t\t\ttemporary_sentence.extend(converted_words)\n\t\t\t\telse:\n\t\t\t\t\ttemporary_sentence.append(word)\n\t\t\ttemporary_sentence = ' '.join(temporary_sentence)\n\t\t\ttemp_prep.append(temporary_sentence)\n\t\tprep_3 = temp_prep\n\n\tif rm_stopwords == 1:\n\t\tstp = set(stopwords.words('portuguese') + list(punctuation))\n\t\tif tokenization == 1:\n\t\t\tprep_3 = [[word for word in sentence if word not in stp] for sentence in prep_3]\n\t\telif tokenization == 0:\n\t\t\tprep_3 = [' '.join([word for word in sentence.split(' ') if word not in stp]) for sentence in prep_3]\n\n\ttmp = pd.DataFrame({'text':prep_3[::2], 'response':prep_3[1::2]})\n\ttrain_data = train_data.append(tmp[['text', 'response']], ignore_index=True)\n\n\treturn train_data", "def Classify_Data(self):\n\n lem = lemmatization()\n\n # Get Mongo Client\n client = MongoClient()\n db = client['allMovies']\n collection = db['Movies']\n\n # Path to folder containing the training model 
files\n path = self.path\n\n # Get the list of doc ids trained\n trained_docs = []\n\n # Mongo queries to retrieve Horror, Romance and Crime movies\n qr1 = self.collection.find({\"content.genres.name\": \"Horror\"})\n qr2 = self.collection.find({\"content.genres.name\": \"Romance\"})\n qr3 = self.collection.find({\"content.genres.name\": \"Crime\"})\n qr4 = self.collection.find({\"content.genres.name\": \"Comedy\"})\n print(\"111\")\n print(qr3)\n\n myfile = open('doc_ids.pkl', 'rb')\n trained_docs = pickle.load(myfile)\n # Get 100 Horror, Romance and Crime movies each, which are not in the trained data set\n\n horr = []\n i = 0\n for rec in qr1:\n if rec['_id'] not in trained_docs:\n i = i + 1\n horr.append(rec)\n\n if i >= 333:\n break\n rom = []\n i = 0\n for rec in qr2:\n if rec['_id'] not in trained_docs:\n i = i + 1\n rom.append(rec)\n\n if i >= 333:\n break\n\n crime = []\n i = 0\n for rec in qr3:\n if rec['_id'] not in trained_docs:\n i = i + 1\n crime.append(rec)\n\n if i >= 334:\n break\n comedy = []\n i = 0\n for rec in qr4:\n if rec['_id'] not in trained_docs:\n i = i + 1\n comedy.append(rec)\n\n if i >= 334:\n break\n\n # Combine the query results\n query_results = []\n for rec in horr:\n query_results.append(rec)\n for rec in rom:\n query_results.append(rec)\n for rec in crime:\n query_results.append(rec)\n print(query_results)\n # Data to be classified\n test_data = []\n\n # Genres of records to be classified\n categories = []\n a = 0\n for movie in query_results:\n test_data.append(movie['content']['overview'])\n for genre in movie['content']['genres']:\n a = a + 1\n if ((genre['name'] == 'Horror') or (genre['name'] == 'Romance') or (genre['name'] == 'Crime') or (\n genre['name'] == 'Comedy') and a <= 80):\n categories.append(genre['name'])\n\n # Lists of training models and vectorizers\n models = [\"SVM\", \"LOGISTIC REGRESSION\", \"GAUSSIAN NB\",\n \"MULTINOMIAL NB\", \"BERNOULLI NB\", \"RANDOM FOREST\", \"BAGGING\", \"GRADIENT\",\n \"Voting\", \"Voting With Weights\"]\n\n vectorizers = [\"COUNT VECTORIZER\", \"TFIDF VECTORIZER\"]\n\n # Load dictionary containing terms appearing in genres\n dictionary = joblib.load(path + \"_Genre_Dictionary\")\n\n vec_1 = feature_extraction.text.CountVectorizer(vocabulary=dictionary)\n vec_2 = feature_extraction.text.TfidfVectorizer(vocabulary=dictionary)\n vec_list = [vec_1, vec_2]\n\n # List to store the classification stats for each model\n stats = []\n # Generate results\n for i in range(0, len(models)):\n for j in range(0, len(vectorizers)):\n time0 = time.process_time()\n model = joblib.load(path + models[i] + \"_\" + vectorizers[j].replace('-', '') + \".pkl\")\n vec = vec_list[j]\n Y = vec.fit_transform(test_data).toarray()\n print(\"y\", Y)\n predicted_genres = model.predict(Y)\n\n k = 0\n horror = 0\n romance = 0\n crime = 0\n\n # Keeps track of correct predictions\n y_correct = []\n\n # Keeps track of incorrect predictions\n y_predicted = []\n for pred in predicted_genres:\n if (categories[k] == \"Horror\"):\n if (pred == \"Horror\"):\n horror += 1\n y_predicted.append(0)\n elif (pred == \"Romance\"):\n y_predicted.append(1)\n else:\n y_predicted.append(2)\n y_correct.append(0)\n elif (categories[k] == \"Romance\"):\n if (pred == \"Romance\"):\n romance += 1\n y_predicted.append(1)\n elif (pred == \"Horror\"):\n y_predicted.append(0)\n else:\n y_predicted.append(2)\n y_correct.append(1)\n elif (categories[k] == \"Crime\"):\n if (pred == \"Crime\"):\n crime += 1\n y_predicted.append(2)\n elif (pred == \"Horror\"):\n 
y_predicted.append(0)\n else:\n y_predicted.append(1)\n y_correct.append(2)\n k = k + 1\n\n # Print results\n score = precision_recall_fscore_support(y_correct, y_predicted, average='weighted')\n # print(\"Number of records classified per second = %d\" % (round((1000/(time.process_time()-time0)),3)))\n print(\"________SCORES__________\")\n print(\"MODEL : \" + models[i])\n print(\"VECTORIZER : \" + vectorizers[j])\n print(\"Horror : %d/333\" % (horror))\n print(\"Romance : %d/333\" % (romance))\n print(\"Crime : %d/334\" % (crime))\n print(\"Precision : %.5f\" % (score[0]))\n print(\"Recall : %.5f\" % (score[1]))\n print(\"F(1) Score : %.5f\" % ((score[1] * score[0] / (score[1] + score[0])) * 2))\n print(\"F(W) Score : %.5f\" % (score[2]))\n print(\"Accuracy : %.5f\" % accuracy_score(y_correct, y_predicted))\n # print(confusion_matrix(y_correct, y_predicted))\n\n dic = {}\n dic['model'] = models[i].title()\n dic['vectorizer'] = vectorizers[j][:-11]\n dic['horror'] = str(horror) + '/' + '333'\n dic['romance'] = str(romance) + '/' + '333'\n dic['crime'] = str(crime) + '/' + '334'\n dic['precision'] = round(score[0], 3)\n dic['Recall'] = round(score[1], 3)\n dic['F(1) Score'] = round(((score[1] * score[0] / (score[1] + score[0])) * 2), 3)\n dic['F(W) Score'] = round(score[2], 3)\n dic['accuracy'] = round(accuracy_score(y_correct, y_predicted), 3)\n stats.append(dic)\n # Store stats in file\n joblib.dump(stats, path + \"classification_results.txt\")\n\n print(\"Done\")\n return stats", "def preprocess(data):\n # Data Preprocessing\n data['GDP_scaled']=preprocessing.scale(data['GDP'])\n data['CLPRB_scaled']=preprocessing.scale(data['CLPRB'])\n data['EMFDB_scaled']=preprocessing.scale(data['EMFDB'])\n data['ENPRP_scaled']=preprocessing.scale(data['ENPRP'])\n data['NGMPB_scaled']=preprocessing.scale(data['NGMPB'])\n data['PAPRB_scaled']=preprocessing.scale(data['PAPRB'])\n data['PCP_scaled']=preprocessing.scale(data['PCP'])\n data['ZNDX_scaled']=preprocessing.scale(data['ZNDX'])\n data['OP_scaled']=preprocessing.scale(data['Nominal Price'])\n data['OP2_scaled']=preprocessing.scale(data['Inflation Adjusted Price'])\n\n return data", "def pre_pipeline(self, results):\n results[\"img_prefix\"] = self.img_prefix\n results[\"seg_prefix\"] = self.seg_prefix\n results[\"proposal_file\"] = self.proposal_file\n results[\"bbox_fields\"] = []\n results[\"mask_fields\"] = []\n results[\"seg_fields\"] = []\n results[\"site_fields\"] = []\n results[\"label_fields\"] = []", "def preprocess_data(data):\n def deal_line(line):\n pos, rating = line.split(',')\n row, col = pos.split(\"_\")\n row = row.replace(\"r\", \"\")\n col = col.replace(\"c\", \"\")\n return int(row), int(col), float(rating)\n\n def statistics(data):\n row = set([line[0] for line in data])\n col = set([line[1] for line in data])\n return min(row), max(row), min(col), max(col)\n\n # parse each line\n data = [deal_line(line) for line in data]\n\n # do statistics on the dataset.\n min_row, max_row, min_col, max_col = statistics(data)\n print(\"number of items: {}, number of users: {}\".format(max_row, max_col))\n\n # build rating matrix.\n ratings = sp.lil_matrix((max_row, max_col))\n for row, col, rating in data:\n ratings[row - 1, col - 1] = rating\n return ratings", "def pre_process(self, documents):\n\n return documents", "def main():\n df = prepro_last()\n X, y = train_build(df)\n fit_store(X, y)", "def train(self):\n lFileList = []\n for fFileObj in os.walk(\"movies_reviews/\"):\n lFileList = fFileObj[2]\n break\n for rev in lFileList:\n if 
int(rev[7])== 1:\n contents = self.loadFile(\"movies_reviews/\" + rev)\n listOfWords = self.tokenize(contents)\n for word in listOfWords:\n self.negRev[word] = self.negRev.get(word, 0) + 1\n if int(rev[7])== 5:\n contents = self.loadFile(\"movies_reviews/\" + rev)\n listOfWords = self.tokenize(contents)\n for word in listOfWords:\n self.posRev[word] = self.posRev.get(word, 0) + 1\n self.save(self.posRev, \"posRev\")\n self.save(self.negRev, \"negRev\")", "def preprocess(self):\n lines = [line.rstrip() for line in open(self.attr_path, 'r')]\n all_attr_names = lines[1].split()\n for i, attr_name in enumerate(all_attr_names):\n self.attr2idx[attr_name] = i\n self.idx2attr[i] = attr_name\n\n lines = lines[2:]\n random.seed(1234)\n random.shuffle(lines)\n for i, line in enumerate(lines):\n split = line.split()\n filename = split[0]\n values = split[1:]\n\n label = []\n for attr_name in self.selected_attrs:\n idx = self.attr2idx[attr_name]\n label.append(values[idx] == '1')\n\n if (i+1) < 4:\n self.test_dataset.append([filename, label])\n else:\n self.train_dataset.append([filename, label])", "def _process_data(self):\r\n # Rename columns to match final feature class\r\n self._rename_columns()\r\n # Add point ID column\r\n self._add_pointid()\r\n # Sort rows by transect id and timestamp\r\n self._sort_rows()\r\n # Fill Null records with a value\r\n self._fill_nulls()\r\n # Set site_code to lower case\r\n self._lower_site_code()\r\n # Create survey_id\r\n self._calc_survey_id()\r\n # Calculate nativesg column if at least one of the veg columns is a Native seagrass type\r\n if set(self.veg_columns).intersection(set(NATIVESG_CODES)) > 0:\r\n self.nativesg_columns = list(set(self.veg_columns).intersection(set(NATIVESG_CODES)))\r\n self._calc_nativesg()\r\n #\r", "def prepare(self):\n if self.opts['verbose']:\n print(\"Preparing dataset (one-time operation)...\")\n # Create paths files and load them back in\n self._build_ID_sets()\n self._create_ID_files()\n self._load_ID_files()\n if self.opts['verbose']:\n print(\"... 
done with preparing the dataset.\")", "def prepare_imdb_data(data, labels, should_shuffle=True):\n\n #Combine positive and negative reviews and labels\n data_train = data['train']['pos'] + data['train']['neg']\n data_test = data['test']['pos'] + data['test']['neg']\n labels_train = labels['train']['pos'] + labels['train']['neg']\n labels_test = labels['test']['pos'] + labels['test']['neg']\n\n #Shuffle reviews and corresponding labels within training and test sets\n if should_shuffle:\n data_train, labels_train = shuffle(data_train, labels_train)\n data_test, labels_test = shuffle(data_test, labels_test)\n\n # Return a unified training data, test data, training labels, test labets\n return data_train, data_test, labels_train, labels_test", "def preprocess_data(df, min_vote_count=1000):\n # note that order matters!\n df = remove_rows_without_feature(df, 'budget')\n df = remove_rows_without_feature(df, 'runtime')\n df = remove_rows_with_non_english_movies(df)\n df = binarize_homepage(df)\n df = add_producers_feature(df)\n df = add_executive_producers_feature(df)\n df = get_movie_scores(df, min_vote_count)\n df = binarize_english(df)\n df = bin_ratings(df)\n df = binarize_genres(df)\n df = binarize_belongs_to_collection(df)\n df = binarize_production_countries(df)\n df = drop_unnecessary_columns(df)\n\n # Export to CSV\n y = df[['rating']]\n x = df.drop(['rating'], 1)\n\n y.to_csv(r'../dataset/Y.csv', index=False)\n x.to_csv(r'../dataset/X.csv', index=False)", "def analyse(self):\n self.__gather_tagged_reviews(self._restaurants)", "def call_preprocessing(self, train_mains, train_appliances):\n return train_mains, train_appliances", "def _preprocess(self):\n\n self.df = self.df[(self.df['days_b_screening_arrest'] <= 30)\n & (self.df['days_b_screening_arrest'] >= -30)\n & (self.df['is_recid'] != -1)\n & (self.df['c_charge_degree'] != 'O')\n & (self.df['score_text'] != 'N/A')]\n\n self.df['c_jail_out'] = pd.to_datetime(self.df['c_jail_out'])\n self.df['c_jail_in'] = pd.to_datetime(self.df['c_jail_in'])\n self.df['length_of_stay'] = (self.df['c_jail_out']\n - self.df['c_jail_in'])\n\n self.df['score_factor'] = np.where(self.df['score_text']\n != 'Low',\n 'HighScore', 'LowScore')\n self.df['y_pred'] = (self.df['score_factor'] == 'HighScore')", "def pre_analyse():\n t = transform()\n model = modified_resnet50()\n model.load_state_dict(\n torch.load(\n \"model.pth.tar\",\n map_location=torch.device(\"cpu\"),\n )[\"state_dict\"]\n )\n model.eval()\n\n def get_preds(img_path):\n \"\"\"\n Gives labelds and probabilities for a single image\n This is were we preprocess the image, using a function defined in the model class\n \"\"\"\n # load image\n img = Image.open(img_path).convert(\"RGB\")\n # process it\n x = t(img)\n # get in in the right format\n x = Variable(x).unsqueeze(0)\n # predictions\n output = model(x)\n # decode\n output = decode(output.cpu().data.numpy()[0])\n\n # filter\n # return pred, proba\n return output\n\n return get_preds(\"image.jpg\")", "def _preparation_workflow(self):\n self._validate_environment()\n self._validate_parameters()\n self._update_verbosity()", "def create_tc_data(data_name, base_location='data',mode = 'train'):\n\n dataset = {'labels': [], 'content': []}\n max_samples = 115000 if mode == 'train' else 7600\n label_to_class = dict()\n\n if data_name == 'yelp':\n df = pd.read_csv(os.path.join(base_location, 'yelp_review_full_csv', mode+'.csv'), \n header=None, names=['labels', 'content'])\n \n df.dropna(subset=['content'], inplace=True)\n df.loc[:, 'content'] = 
df.content.swifter.apply(preprocess)\n # filter rows with length greater than 20 (2 words including spaces on average)\n df.drop(df[df['content'].map(len) < 20].index, inplace=True)\n # shuffle and sample \n df = df.sample(n = max_samples)\n \n \n dataset['labels'].extend(list(df.labels[:max_samples]))\n dataset['content'].extend(list(df.content[:max_samples]))\n\n elif data_name == 'amazon':\n df = pd.read_csv(os.path.join(base_location, 'amazon_review_full_csv', mode+'.csv'), \n header=None, names=['labels','title','content'])\n df.dropna(subset=['content'], inplace=True)\n # df.dropna(subset=['title'], inplace=True)\n df.loc[:, 'content'] = df.content.swifter.apply(preprocess)\n # filter rows with length greater than 20 (2 words including spaces on average)\n df.drop(df[df['content'].map(len) < 20].index, inplace=True)\n # shuffle and sample \n df = df.sample(n = max_samples)\n\n dataset['labels'].extend(list(df.labels[:max_samples]))\n # dataset['content'].extend( [title + \"[SEP]\"+ content for title,content in zip(list(df.title[:max_samples]),list(df.content[:max_samples]))])\n dataset['content'].extend(list(df.content[:max_samples]))\n\n elif data_name == 'yahoo':\n df = pd.read_csv(os.path.join(base_location, 'yahoo_answers_csv', mode+'.csv'), \n header=None, names=['labels', 'title', 'content', 'answer'])\n df.dropna(subset=['content'], inplace=True)\n # df.dropna(subset=['title'], inplace=True)\n df.dropna(subset=['answer'], inplace=True)\n df.loc[:, 'content'] = df.content.swifter.apply(preprocess)\n # filter rows with length greater than 20 (2 words including spaces on average)\n df.drop(df[df['content'].map(len) < 20].index, inplace=True)\n # shuffle and sample \n df = df.sample(n = max_samples)\n dataset['labels'].extend(list(df.labels[:max_samples]))\n # dataset['content'].extend( [title + \"[SEP]\"+ content + \"[SEP]\" +answer for title,content,answer in zip(list(df.title[:max_samples]),list(df.content[:max_samples]),list(df.answer[:max_samples]))])\n dataset['content'].extend( [content + \"[SEP]\" +answer for content,answer in zip(list(df.content[:max_samples]),list(df.answer[:max_samples]))])\n\n elif data_name == 'dbpedia':\n df = pd.read_csv(os.path.join(base_location, 'dbpedia_csv', mode+'.csv'), \n header=None, names=['labels','title','content'])\n\n df.dropna(subset=['content'], inplace=True)\n # df.dropna(subset=['title'], inplace=True)\n df.loc[:, 'content'] = df.content.swifter.apply(preprocess)\n # filter rows with length greater than 20 (2 words including spaces on average)\n df.drop(df[df['content'].map(len) < 20].index, inplace=True)\n # shuffle and sample \n df = df.sample(n = max_samples)\n\n dataset['labels'].extend(list(df.labels[:max_samples]))\n # dataset['content'].extend( [title + \"[SEP]\"+ content for title,content in zip(list(df.title[:max_samples]),list(df.content[:max_samples]))])\n dataset['content'].extend(list(df.content[:max_samples]))\n \n else:\n df = pd.read_csv(os.path.join(base_location, 'ag_news_csv', mode+'.csv'), \n header=None, names=['labels','title','content'])\n df.dropna(subset=['content'], inplace=True)\n # df.dropna(subset=['title'], inplace=True)\n df.loc[:, 'content'] = df.content.swifter.apply(preprocess)\n # filter rows with length greater than 20 (2 words including spaces on average)\n df.drop(df[df['content'].map(len) < 20].index, inplace=True)\n # shuffle and sample \n df = df.sample(n = max_samples)\n dataset['labels'].extend(list(df.labels[:max_samples]))\n # dataset['content'].extend( [title + \"[SEP]\"+ content for 
title,content in zip(list(df.title[:max_samples]),list(df.content[:max_samples]))])\n dataset['content'].extend(list(df.content[:max_samples]))\n\n return dataset['labels'],dataset['content']", "def preprocess(args):\n prism.preprocess.run(\n input_fp=args.input,\n output_fp=args.output,\n no_prefilter=args.no_prefilter,\n full_pattern_proportion=args.full_pattern_proportion,\n error=args.error,\n bisulfite_conversion_rate=args.bisulfite_conversion_rate,\n processivity=args.processivity,\n recruitment_efficiency=args.recruitment_efficiency,\n threads=args.threads,\n seed=args.seed,\n verbose=args.verbose,\n )", "def prep_data():\n loader = DLoader()\n cap = loader.visitor_cnt\n\n pass", "def preprocess(config: Config) -> None:\n print(colored(\"preprocessing:\", attrs=[\"bold\"]))\n factory = PreprocessingFactory()\n factory.process(config)", "def main():\n num_rows = 500000\n review_df = pd.read_csv(\"s3://msia490project/processed_video_reviews.csv\").dropna().head(num_rows)\n # train and test set split\n X_train, X_test, y_train, y_test = train_test_split(review_df['reviewText'], review_df['score'],\n random_state=115)\n # re-run the model pipeline and generate necessary artifacts for making predictions\n best_svm = LinearSVC(random_state=115)\n ngram_range = (1, 3)\n generate_artifacts_for_best_svm_model(best_svm, ngram_range, X_train, y_train)", "def prepare_data(self):", "def __getData(self,review,post_type):\r\n page = {'title':''}\r\n try:\r\n page['et_author_name'] = stripHtml(review.find('p','post_title').find('a').renderContents()).replace('> ','').replace('...','')\r\n except:\r\n log.info(self.log_msg('author name not found'))\r\n try:\r\n aut_info = {'ei_author_points_count':'crp_points','ei_author_posts_count':'forum_posts_count'}\r\n for each in aut_info.keys():\r\n page[each] = int(re.search('\\d+',stripHtml(review.find('p',aut_info[each]).renderContents())).group())\r\n except:\r\n log.info(self.log_msg('Author posts count not found'))\r\n try:\r\n page['et_author_membership'] = stripHtml(review.find('p','crp_level').renderContents())\r\n except:\r\n log.info(self.log_msg('Author member ship not found'))\r\n try:\r\n post_tag = review.find('div','commentbox_mid')\r\n page['title'] = stripHtml(post_tag.find('h2','post_title').renderContents())\r\n date_str = stripHtml(post_tag.find('p','post_date').renderContents()).split('|')[0].strip()\r\n page['posted_date'] = datetime.strftime(datetime.strptime(date_str,'%m-%d-%Y %I:%M %p'),\"%Y-%m-%dT%H:%M:%SZ\")\r\n remove_tags = {'div':['commentbox_nav','commentbox_sig'],'h2':['post_title'],'p':['post_date']}\r\n for each_key in remove_tags.keys():\r\n for each in remove_tags[each_key]:\r\n tag = post_tag.find(each_key,each)\r\n if tag:\r\n tag.extract()\r\n tags = review.findAll('blockquote')\r\n for each in tags:\r\n each.extract()\r\n page['data'] = stripHtml(post_tag.renderContents())\r\n except:\r\n log.exception(self.log_msg('title not found'))\r\n return False\r\n try:\r\n if page['title']=='':\r\n if len(page['data']) > 50:\r\n page['title'] = page['data'][:50] + '...'\r\n else:\r\n page['title'] = page['data']\r\n except:\r\n log.exception(self.log_msg('title not found'))\r\n page['title'] = ''\r\n try:\r\n page['et_data_reply_to'] = self.thread_id\r\n except:\r\n log.info(self.log_msg('data reply to is not found'))\r\n try:\r\n page['et_data_post_type'] = post_type\r\n except:\r\n log.info(self.log_msg('Page info is missing'))\r\n try:\r\n page['et_data_forum'] = self.hierarchy[0]\r\n page['et_data_subforum'] = 
self.hierarchy[1]\r\n page['et_data_topic'] = self.hierarchy[2]\r\n except:\r\n log.exception(self.log_msg('data forum not found'))\r\n## try:\r\n## data_str = review.find('div','threadText')\r\n##\r\n## data_tag = review.find('div','threadDetails')\r\n## [x.findParent('div') for x in data_tag.findAll('blockquote')]\r\n## for each in ['threadSubject','threadLinks']:\r\n## tag = data_tag.find('div',each)\r\n## if tag:\r\n## tag.extract()\r\n## page['data'] = stripHtml(data_tag.renderContents()).replace('______________________________________________________\\nPlease mark replies as answers if they answered your question...','').strip()\r\n## except:\r\n## log.info(self.log_msg('data not found'))\r\n## page['data'] =''\r\n return page", "def preprocess_data(train_neg_file_pattern,\n train_pos_file_pattern,\n test_neg_file_pattern,\n test_pos_file_pattern,\n transformed_train_file_pattern,\n transformed_test_file_pattern,\n transformed_metadata_dir,\n raw_metadata_dir,\n transform_func_dir,\n temp_dir,\n vocab_size,\n delimiters):\n pipeline_name = 'DataflowRunner'\n options = {\n 'job_name': ('cloud-ml-hazmat-preprocess-{}'.format(datetime.datetime.now().strftime('%Y%m%d%H%M%S'))),\n 'temp_location': temp_dir,\n 'project': \"stone-outpost-636\",\n 'max_num_workers': 8\n }\n pipeline_options = beam.pipeline.PipelineOptions(flags=[], **options)\n #with beam.Pipeline(pipeline_name, options=pipeline_options) as pipeline:\n # with beam_impl.Context(temp_dir=temp_dir):\n with beam.Pipeline() as pipeline:\n with beam_impl.Context(temp_dir=tempfile.mkdtemp()):\n\n train_data = pipeline | 'ReadTrain' >> ReadAndShuffleData((train_neg_file_pattern, train_pos_file_pattern))\n test_data = pipeline | 'ReadTest' >> ReadAndShuffleData((test_neg_file_pattern, test_pos_file_pattern))\n preprocessing_fn = generate_preprocessing_fn(vocab_size, delimiters)\n\n (transformed_train_data, transformed_metadata), transform_fn = ((train_data, const.RAW_METADATA)\n | 'AnalyzeAndTransform' >> beam_impl.AnalyzeAndTransformDataset(preprocessing_fn))\n\n _ = (transform_fn | 'WriteTransformFn' >> tft_beam_io.WriteTransformFn(transform_func_dir))\n\n transformed_test_data, _ = (((test_data, const.RAW_METADATA), transform_fn)\n | 'Transform' >> beam_impl.TransformDataset())\n\n _ = (transformed_train_data\n | 'WriteTrainData' >> tfrecordio.WriteToTFRecord(transformed_train_file_pattern,\n coder=example_proto_coder.ExampleProtoCoder(transformed_metadata.schema)))\n\n _ = (transformed_test_data\n | 'WriteTestData' >> tfrecordio.WriteToTFRecord(transformed_test_file_pattern,\n coder=example_proto_coder.ExampleProtoCoder(transformed_metadata.schema)))\n\n _ = (transformed_metadata\n | 'WriteTransformedMetadata' >> beam_metadata_io.WriteMetadata(transformed_metadata_dir, pipeline=pipeline))\n\n _ = (const.RAW_METADATA\n | 'WriteRawMetadata' >> beam_metadata_io.WriteMetadata(raw_metadata_dir, pipeline=pipeline))", "def stats_preprocessing(self):\n output = {'before_tot':[],\n 'before_unique':[],\n 'after_tot':[],\n 'after_unique':[]}\n for i in range(len(self.table)):\n description_raw = self.table.description.iloc[i].split(' ')\n clean_txt = self.table.clean_text.iloc[i].split(' ')\n\n output['before_tot'].append(len(description_raw))\n output['before_unique'].append(len(set(description_raw)))\n output['after_tot'].append(len(clean_txt))\n output['after_unique'].append(len(set(clean_txt)))\n \n print(\"\"\"Before preprocessing a description had on average {0} words with standard deviation {1}. 
\\n\nMoreover, the average of unique words was {2} and the standard deviation {3}.\"\"\"\\\n .format(round(mean(output['before_tot']), 2), round(stdev(output['before_tot']), 2), \n round(mean(output['before_unique']), 2), round(stdev(output['before_unique'])), 2))\n \n print(\"\"\"\\nAfter preprocessing a description has on average {0} words with standard deviation {1}. \\n \nThe average of unique words is now {2} and the standard deviation {3}.\"\"\"\\\n .format(round(mean(output['after_tot']), 2), round(stdev(output['after_tot']), 2), \n round(mean(output['after_unique']),2), round(stdev(output['after_unique']), 2)))\n\n return output", "def _tidyBeforeRun (self):\n\t\tself._buildProps ()\n\t\tself._buildInput ()\n\t\tself._buildProcVars ()\n\t\tself._buildJobs ()", "def preprocessing_pipeline(self):\n self.__multilabel_processing()\n self.__split_dataset()\n self.__save_datasets()", "def pre_process(cls, *args, **kwargs):\n pass", "def pre_process(cls, *args, **kwargs):\n pass", "def pre_process(cls, *args, **kwargs):\n pass", "def pre_process(cls, *args, **kwargs):\n pass", "def preprocess(\n self, data: List[Dict[str, Any]]\n ) -> Generator[Dict[str, Any], None, None]:\n raise NotImplementedError", "def test_hreview(self):\n rev1 = microformats.models.hReview()\n rev1.summary=\"Acme's new services rock!\"\n rev1.type='business'\n rev1.description='Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat. Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit lobortis nisl ut aliquip ex ea commodo consequat.'\n rev1.rating=4\n rev1.dtreviewed=datetime.datetime(2009,4,10)\n rev1.reviewer='John Smith'\n rev1.fn='Acme Corp'\n rev1.url='http://acme.com'\n rev1.tel='+44(0)1234 567456'\n rev1.street_address = '5445 N. 27th Street'\n rev1.extended_address = ''\n rev1.locality = 'Milwaukee'\n rev1.region = 'WI'\n rev1.country_name = 'US'\n rev1.postal_code = '53209'\n rev1.save()\n rev2 = microformats.models.hReview()\n rev2.summary = 'A phenomenal tuba recital'\n rev2.description = 'Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat. Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit lobortis nisl ut aliquip ex ea commodo consequat.'\n rev2.rating=5\n rev2.type='event'\n rev2.reviewer='John Doe'\n rev2.fn='John Fletcher - One man and his Tuba'\n rev2.url='http://www.johnfletcher-tuba.co.uk/'\n rev2.dtstart = datetime.datetime(1987, 10, 3, 19, 30)\n rev2.street_address = 'The Pro Arte Theatre'\n rev2.locality = 'London'\n rev2.save()\n rev3 = microformats.models.hReview()\n rev3.summary = 'Latest Star-Wars is Sucko-Barfo'\n rev3.description = 'Claritas est etiam processus dynamicus, qui sequitur mutationem consuetudium lectorum. Mirum est notare quam littera gothica, quam nunc putamus parum claram, anteposuerit litterarum formas humanitatis per seacula quarta decima et quinta decima. 
Eodem modo typi, qui nunc nobis videntur parum clari, fiant sollemnes in futurum.'\n rev3.rating=1\n rev3.type='film'\n rev3.reviewer='Barry Norman'\n rev3.fn='Star Wars - Revenge of the Sith'\n rev3.url='http://www.starwars.com/movies/episode-iii/'\n rev3.save()\n # Test for a review concerning something represented by an hCard\n result = hreview(rev1, autoescape=True) \n expected = u'\\n<div class=\"hreview\">\\n <strong class=\"summary\">Acme&#39;s new services rock!</strong>\\n <abbr class=\"type\" title=\"business\"> Business</abbr> Review\\n <br/>\\n \\n <abbr title=\"\" class=\"dtreviewed\">Fri 10 Apr 2009</abbr>\\n \\n by\\n <span class=\"reviewer vcard\"><span class=\"fn\">John Smith</span></span>\\n \\n \\n <div class=\"item vcard\">\\n \\n <a class=\"url fn org\" href=\"http://acme.com\">\\n \\n Acme Corp\\n \\n </a>\\n \\n <div class=\"tel\">+44(0)1234 567456</div>\\n \\n \\n<div class=\"adr\">\\n <div class=\"street-address\">5445 N. 27th Street</div>\\n \\n <span class=\"locality\">Milwaukee</span>&nbsp;\\n <span class=\"region\">WI</span>&nbsp;\\n <span class=\"postal-code\">53209</span>&nbsp;\\n <span class=\"country-name\">United States</span>\\n</div>\\n\\n \\n </div>\\n \\n \\n \\n \\n \\n \\n <abbr class=\"rating\" title=\"4\">\\u2605\\u2605\\u2605\\u2605\\u2606</abbr>\\n \\n \\n \\n <blockquote class=\"description\">\\n Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat. Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit lobortis nisl ut aliquip ex ea commodo consequat.\\n </blockquote>\\n \\n</div>\\n'\n self.assertEquals(expected, result)\n # Test for a review concerning something represented by an hCalendar\n result = hreview(rev2, autoescape=True) \n expected = u'\\n<div class=\"hreview\">\\n <strong class=\"summary\">A phenomenal tuba recital</strong>\\n <abbr class=\"type\" title=\"event\"> Event</abbr> Review\\n <br/>\\n \\n by\\n <span class=\"reviewer vcard\"><span class=\"fn\">John Doe</span></span>\\n \\n <div class =\"item vevent\">\\n <a href=\"http://www.johnfletcher-tuba.co.uk/\" class=\"url\">\\n \\n <abbr title=\"1987-10-03T19:30:00\" class=\"dtstart\">Sat 03 Oct 1987 7:30 p.m.</abbr>\\n \\n \\n </a> -\\n <span class=\"summary\">John Fletcher - One man and his Tuba</span>\\n \\n \\n<div class=\"adr\">\\n <div class=\"street-address\">The Pro Arte Theatre</div>\\n \\n <span class=\"locality\">London</span>&nbsp;\\n \\n \\n \\n</div>\\n\\n \\n </div>\\n \\n \\n \\n \\n \\n \\n <abbr class=\"rating\" title=\"5\">\\u2605\\u2605\\u2605\\u2605\\u2605</abbr>\\n \\n \\n <blockquote class=\"description\">\\n Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat. 
Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit lobortis nisl ut aliquip ex ea commodo consequat.\\n </blockquote>\\n \\n</div>\\n'\n self.assertEquals(expected, result)\n # Test for a review about anything else\n result = hreview(rev3, autoescape=True) \n expected = u'\\n<div class=\"hreview\">\\n <strong class=\"summary\">Latest Star-Wars is Sucko-Barfo</strong>\\n <abbr class=\"type\" title=\"film\"> Film</abbr> Review\\n <br/>\\n \\n by\\n <span class=\"reviewer vcard\"><span class=\"fn\">Barry Norman</span></span>\\n \\n \\n \\n <div class=\"item\">\\n \\n <a class=\"url fn\" href=\"http://www.starwars.com/movies/episode-iii/\">\\n \\n Star Wars - Revenge of the Sith\\n \\n </a>\\n \\n </div>\\n \\n \\n \\n \\n <abbr class=\"rating\" title=\"1\">\\u2605\\u2606\\u2606\\u2606\\u2606</abbr>\\n \\n \\n \\n \\n \\n \\n <blockquote class=\"description\">\\n Claritas est etiam processus dynamicus, qui sequitur mutationem consuetudium lectorum. Mirum est notare quam littera gothica, quam nunc putamus parum claram, anteposuerit litterarum formas humanitatis per seacula quarta decima et quinta decima. Eodem modo typi, qui nunc nobis videntur parum clari, fiant sollemnes in futurum.\\n </blockquote>\\n \\n</div>\\n'\n self.assertEquals(expected, result)\n # Test for minimal review\n rev3.summary = ''\n rev3.description = ''\n rev3.rating = 1\n rev3.type = 'film'\n rev3.reviewer = 'Barry Norman'\n rev3.fn = 'Star Wars - Revenge of the Sith'\n rev3.url = ''\n result = hreview(rev3, autoescape=True) \n expected = u'\\n<div class=\"hreview\">\\n \\n <abbr class=\"type\" title=\"film\"> Film</abbr> Review\\n <br/>\\n \\n by\\n <span class=\"reviewer vcard\"><span class=\"fn\">Barry Norman</span></span>\\n \\n \\n \\n <div class=\"item\">\\n \\n <span class=\"fn\">\\n \\n Star Wars - Revenge of the Sith\\n \\n </span>\\n \\n </div>\\n \\n \\n \\n \\n <abbr class=\"rating\" title=\"1\">\\u2605\\u2606\\u2606\\u2606\\u2606</abbr>\\n \\n \\n \\n \\n \\n \\n</div>\\n'\n self.assertEquals(expected, result)", "def process_reviews(reviews: list):\n\n def process_review(review, i, n):\n print(f'\\rProcessing {i + 1} of {n} reviews', end='')\n return nltk.pos_tag(nltk.word_tokenize(review[0].strip())), review[1]\n\n n = len(reviews)\n processed = [process_review(review, i, n)\n for i, review in enumerate(reviews)]\n return processed", "def prep_data(ratings_df, watched_df=None, watchlist_df=None,\n good_threshold=4, bad_threshold=3):\n id_book = pd.read_csv('title_basics_small.csv')\n try:\n # try to read Letterboxd user data\n # drop rows with nulls in the columns we use\n ratings_df = ratings_df.dropna(axis=0, subset=['Rating', 'Name', 'Year'])\n # split according to user rating\n good_df = ratings_df[ratings_df['Rating'] >= good_threshold]\n bad_df = ratings_df[ratings_df['Rating'] <= bad_threshold]\n neutral_df = ratings_df[(ratings_df['Rating'] > bad_threshold) & (ratings_df['Rating'] < good_threshold)]\n # convert dataframes to lists\n good_list, good_dict = df_to_id_list(good_df, id_book)\n bad_list, bad_dict = df_to_id_list(bad_df, id_book)\n neutral_list, neutral_dict = df_to_id_list(neutral_df, id_book)\n except KeyError:\n # Try to read IMDb user data\n # strip ids of \"tt\" prefix\n ratings_df['movie_id'] = ratings_df['Const'].apply(lambda x: str(x).lstrip(\"tt\"))\n # drop rows with nulls in the columns we use\n ratings_df = ratings_df.dropna(axis=0, subset=['Your Rating', 'Year'])\n # split according to user rating\n good_df = ratings_df[ratings_df['Your Rating'] >= 
good_threshold*2]\n bad_df = ratings_df[ratings_df['Your Rating'] <= bad_threshold*2]\n neutral_df = ratings_df[(ratings_df['Your Rating'] > bad_threshold*2) & (ratings_df['Your Rating'] < good_threshold*2)]\n # convert dataframes to lists\n good_list = good_df['movie_id'].to_list()\n bad_list = bad_df['movie_id'].to_list()\n neutral_list = neutral_df['movie_id'].to_list()\n # make ratings dictionaries\n good_dict = dict(zip(good_list, good_df['Your Rating'].tolist()))\n bad_dict = dict(zip(bad_list, bad_df['Your Rating'].tolist()))\n neutral_dict = dict(zip(neutral_list, neutral_df['Your Rating'].tolist()))\n except Exception as e:\n # can't read the dataframe as Letterboxd or IMDb user data\n print(\"This dataframe has columns:\", ratings_df.columns)\n raise Exception(e)\n\n ratings_dict = dict(list(good_dict.items()) + list(bad_dict.items()) + list(neutral_dict.items()))\n\n if (watched_df is not None) and (not watched_df.empty):\n # Construct list of watched movies that aren't rated \"good\" or \"bad\"\n # First, get a set of identified IDs.\n rated_names = set(good_df.Name.tolist() + bad_df.Name.tolist() + neutral_list)\n # drop nulls from watched dataframe\n full_history = watched_df.dropna(axis=0, subset=['Name', 'Year'])\n # get list of watched movies that haven't been rated\n hist_list = df_to_id_list(full_history[~full_history['Name'].isin(rated_names)], id_book)[0]\n # add back list of \"neutral\" movies (whose IDs we already found before)\n hist_list = hist_list + neutral_list\n else: hist_list = neutral_list\n\n if (watchlist_df is not None) and (not watchlist_df.empty):\n try:\n watchlist_df = watchlist_df.dropna(axis=0, subset=['Name', 'Year'])\n val_list = df_to_id_list(watchlist_df, id_book)[0]\n except KeyError:\n watchlist_df = watchlist_df.dropna(axis=0, subset=['Const', 'Year'])\n watchlist_df['movie_id'] = watchlist_df['Const'].str.lstrip(\"tt\")\n val_list = watchlist_df['movie_id'].tolist()\n else: val_list = []\n\n return (good_list, bad_list, hist_list, val_list, ratings_dict)", "def pre_process_data(self, all_labels, all_data):\n\n # [1] Normalizes data\n all_data = self.pre_precess_manager.normalization(all_data)\n\n data_train, data_test, label_train, label_test = train_test_split(all_data, all_labels, test_size=0.1,\n shuffle=True)\n\n return data_train, data_test, label_train, label_test", "def _process(self):\n f = osp.join(self.processed_dir, 'pre_filter.pt')\n if osp.exists(f) and torch.load(f) != _repr(self.pre_filter):\n warnings.warn(\n \"The `pre_filter` argument differs from the one used in \"\n \"the pre-processed version of this dataset. 
If you want to \"\n \"make use of another pre-fitering technique, make sure to \"\n \"delete '{self.processed_dir}' first\")\n\n if files_exist(self.processed_paths): # pragma: no cover\n return\n\n if self.log and 'pytest' not in sys.modules:\n print('Processing...', file=sys.stderr)\n\n makedirs(self.processed_dir)\n self.process()\n\n path = osp.join(self.processed_dir, 'pre_filter.pt')\n torch.save(_repr(self.pre_filter), path)\n\n if self.log and 'pytest' not in sys.modules:\n print('Done!', file=sys.stderr)", "def predict(self, reviews):\n self.vect_rev = self.vectorizer.transform(reviews)\n self.dmat = xgb.DMatrix(self.vect_rev)\n self.probs = self.bst.predict(self.dmat)\n\n # Get tough on reviews by requiring 0.6 probability threshold\n self.preds = 1 * (self.probs > 0.6)", "def prepare_preprocessed(inputFileName, sentence=False):\n\n # Define functions for stopwords, bigrams, trigrams and lemmatization\n def remove_stopwords(texts):\n return [[word for word in doc if word not in stop_words] for doc in texts]\n\n if sentence:\n data = readInputFileSentence(inputFileName, True)\n else:\n data = readInputFile(inputFileName, True)\n\n\n data_words_nostops = remove_stopwords(data)\n\n\n # Create Dictionary\n id2word = corpora.Dictionary(data_words_nostops)\n\n # Create Corpus\n # Term Document Frequency\n corpus = [id2word.doc2bow(text) for text in data]\n\n return corpus, id2word, data, None #No bigrams in this case", "def prepare_data(train, test):\n # change the name of the target column\n train.rename(columns={\"revenue\": \"target\"}, inplace=True)\n # map bool values to yes and no\n train[\"Weekend\"] = train[\"Weekend\"].map({True: \"Yes\", False: \"No\"})\n test[\"Weekend\"] = test[\"Weekend\"].map({True: \"Yes\", False: \"No\"})\n # set the id col as index\n train.set_index(\"id\", inplace=True)\n test.set_index(\"id\", inplace=True)\n\n # seperate the fetures and the target\n X_train = train.drop(\"target\", axis=1).copy()\n y_train = train[\"target\"].copy()\n X_test = test.copy()\n\n # select numerical and categorical columns\n num_cols = X_train.select_dtypes(exclude=\"object\").columns.tolist()\n cat_cols = X_train.select_dtypes(include=\"object\").columns.tolist()\n\n # numerical pipeline\n num_pipe = make_pipeline(SimpleImputer(strategy=\"mean\"))\n\n # categorical pipeline\n cat_pipe = make_pipeline(\n SimpleImputer(strategy=\"constant\", fill_value=\"NA\"),\n OneHotEncoder(handle_unknown=\"ignore\", sparse=False),\n )\n\n # full pipeline for data preprocessing\n full_pipe = ColumnTransformer(\n [(\"num\", num_pipe, num_cols), (\"cat\", cat_pipe, cat_cols)]\n )\n return X_train, y_train, X_test, full_pipe", "def preprocess_US_Airlines_data(self):\n print(\"Processing US Airlines dataset ...\")\n self.full_dataset = self.full_dataset.loc[:,[\"airline_sentiment\", \"text\", \"tweet_created\", \"tweet_location\"]]\n self.full_dataset[\"score\"] = self.full_dataset[\"airline_sentiment\"].apply(lambda x: self.string_sentiment_to_idx(x))" ]
[ "0.7254628", "0.70474005", "0.69253355", "0.6693225", "0.6500812", "0.6482826", "0.64625686", "0.6401025", "0.6401025", "0.6401025", "0.63591665", "0.63579935", "0.63479227", "0.6305108", "0.6280583", "0.6279011", "0.62662226", "0.62520474", "0.6230281", "0.6212326", "0.61989707", "0.61331695", "0.6121733", "0.6116308", "0.6079463", "0.5976481", "0.5972374", "0.5972374", "0.5972374", "0.5972374", "0.5972374", "0.5955247", "0.59503603", "0.5935478", "0.5912753", "0.5898543", "0.58874905", "0.58784425", "0.58634084", "0.5860246", "0.58509785", "0.5837272", "0.5821753", "0.57877636", "0.57838374", "0.5770291", "0.57529867", "0.5747681", "0.574741", "0.57248944", "0.5710859", "0.5696272", "0.5646029", "0.5645126", "0.5643636", "0.56245244", "0.56058323", "0.5581684", "0.5561957", "0.55611426", "0.5553255", "0.55504256", "0.55483896", "0.55327785", "0.5529135", "0.5527847", "0.5521145", "0.5519252", "0.5514723", "0.5514509", "0.5513332", "0.5507489", "0.5499423", "0.54831475", "0.54805505", "0.54720783", "0.5467269", "0.5464989", "0.546359", "0.5457426", "0.54549843", "0.5442979", "0.5441771", "0.5440094", "0.54353225", "0.5432584", "0.5432333", "0.5432333", "0.5432333", "0.5432333", "0.5432228", "0.5431988", "0.54258144", "0.54218024", "0.54180366", "0.5417113", "0.54103005", "0.5405097", "0.54008013", "0.53998476" ]
0.6111473
24
Preprocess and generate tip data.
import json
from typing import Any, Dict, Generator

import utils  # project-local helper module assumed by the original code


def gen_tip_data(fp: str) -> Generator[Dict[str, Any], None, None]:
    """Read a JSON-lines file, preprocess each record, and yield index actions."""
    with open(fp, encoding='utf-8') as f:
        for line in f:
            data = json.loads(line)
            utils.preprocess_raw_json(data)  # clean the raw record in place
            doc = {
                "_index": "tip",
                "_source": data,
            }
            yield doc
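The doc dicts yielded above match the action format consumed by the Elasticsearch bulk helper; a minimal usage sketch, assuming an elasticsearch-py client is available (the host URL and the input file name "tip.json" are hypothetical):

from elasticsearch import Elasticsearch
from elasticsearch.helpers import bulk

es = Elasticsearch("http://localhost:9200")  # hypothetical cluster address
bulk(es, gen_tip_data("tip.json"))  # streams the yielded actions into the "tip" index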
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def preProcess(self, datum):\n pass", "def preprocess(self, data, label):\n\t\traise NotImplementedError", "def _build_preprocessing(self):\n\n # For now, do nothing\n pass", "def preprocess(self):", "def _preprocess(self):\n self.data['sentences'] = self.data['text'].apply(self._tokenize_sent)\n self.data['nouns'] = self.data['sentences'].apply(self._get_nouns)\n # self._get_frequent_features()\n # self._compactness_pruning()\n # self._redundancy_pruning()\n # self._get_features()\n self._extract_opinions()", "def preprocess(data):\n raise NotImplementedError", "def preprocess(self):\n pass", "def preprocess(self):\n pass", "def preprocess(self):\n pass", "def run_preprocessing(self, serie):\n pass", "def pre_process(self):\n pass", "def pre_process(self):\n pass", "def pre_process(self):\n pass", "def pre_process(self):\n pass", "def pre_process(self):\n pass", "def calculate_tip(meal_base, tip_rate):", "def test_data_preprocessing(raw_data): \r\n\r\n # get data output\r\n data_output = raw_data[['Submitby Date Time', 'Challenge Manager', 'Challenge Copilot', 'Posting Date Date', 'Track',\r\n 'Technology List', 'First Place Prize', 'Num Registrations', 'Total Prize']]\r\n with open('cache/extended_columns.pkl', 'rb') as f:\r\n extended_columns = pickle.load(f)\r\n with open('cache/num_date_columns.pkl', 'rb') as f:\r\n max_date_columns = pickle.load(f)\r\n \r\n data_output = class_binaryzation_for_test(data_output, extended_columns)\r\n try:\r\n data_output = date_separation1(data_output, max_num_columns=NUM_DATE_COLUMNS)\r\n except:\r\n data_output = date_separation2(data_output)\r\n data_output = money_digitalization(data_output)\r\n data_output = get_date_in_days(data_output)\r\n data_output['Days from Posting to Submit'] = data_output['Submitby Date Time Days from 2016'] \\\r\n - data_output['Posting Date Date Days from 2016'] \r\n\r\n return data_output", "def preprocess(self,data):\n preprocessObj = PreprocessData()\n preprocess_data = preprocessObj.preprocess(data)\n return preprocess_data", "def preprocess_train_data(self):\r\n print(\"* Preprocessing training data.\", flush=True)\r\n prep.create_HDF_file(self.C.training_set, is_training_set=True)\r\n\r\n self.print_time_elapsed()", "def prepare_data():\n user_name = os.environ.get('USER')\n traintest_corpus = ResumeCorpus('/Users/' + user_name + '/Documents/Data')\n random.shuffle(traintest_corpus.resumes)\n\n for resume in traintest_corpus.resumes:\n try:\n review_text = pre_processing(resume[0])\n review_text = \" \".join(review_text)\n data_dict['data'].append(review_text)\n data_dict['label'].append(resume[1])\n except:\n pass", "def _preprocess(self, data):\n\n # pipeline: first call the previous statistics:\n if self.previous_statistics is not None:\n data = self.previous_statistics.statistics(data)\n # the first of the statistics need to take list as input, in order to match the API. Then actually the\n # transformations work on np.arrays. In fact the first statistic transforms the list to array. Therefore, the\n # following code needs to be called only if the self statistic is the first, i.e. 
it does not have a\n # previous_statistic element.\n else:\n data = self._check_and_transform_input(data)\n\n return data", "def preprocess_data(self):\n\n self._preprocess_train_data()\n self._preprocess_test_data()", "def prepare_data():\n #data, label = load_ta_data(), load_ta_target()\n data, label = load_own_data(), load_own_target()\n tra_x, tst_x = split_samples(data)\n tra_y, tst_y = split_samples(label)\n return (tra_x, tst_x, tra_y, tst_y)", "def preprocess_test_data(self):\r\n print(\"* Preprocessing test data.\", flush=True)\r\n prep.create_HDF_file(self.C.test_set)\r\n\r\n self.print_time_elapsed()", "def preproc_data(data):\n # Load data manually from Yahoo! finance\n\n # Initialize TP Matrix\n # 3-dimension: # of stock * 18 * 18\n # narray\n _TP_matrixs = np.zeros(\n (len(data.ix[stockname]) - 230, 18, 18), dtype=np.bool)\n old = data.ix[stockname]['close'][229]\n TP_matrixs = pd.Panel(_TP_matrixs, items=data.ix[stockname].index[230:])\n label = np.zeros((len(data.ix[stockname]) - 230), dtype=np.float)\n dataindex = 0\n dataset = []\n # Construct TP Matrix\n for TP_matrix in TP_matrixs.iteritems():\n # Extract raw close price of last 230 days\n # pdb.set_trace()\n tp_features = np.zeros((18, 18), dtype=np.bool)\n _list_CP = data.ix[stockname][data.ix[stockname].index <\n TP_matrix[0]]['close'].tolist()\n list_CP = _list_CP[len(_list_CP) - 230: len(_list_CP)]\n close = data.ix[stockname]['close'][dataindex + 230]\n label = (close - old) / old\n old = close\n # col[0, 8] for Upward TP Matrix\n # col[9, 17] for Downward TP Matrix\n for col in range(0, 18):\n D = columns[col][0] - 1\n for row in range(0, 18):\n # For each element of TP Matrix\n for TP in range(D, columns[col][1]):\n # Change ratio of stock on day D with repect to the price\n # at TP\n C_TPD = (list_CP[TP] - list_CP[D]) / list_CP[D]\n if C_TPD * 100 >= rows[row][0] and C_TPD * 100 < rows[row][1]:\n TP_matrix[1][row][col] = True\n tp_features[row][col] = True\n break\n\n sample = DataSet()\n sample.tp_features = tp_features\n sample.labels = label\n dataindex += 1\n dataset.append(sample)\n\n filename = 'data/TP_matrix_' + stockname + '.pkl'\n output = open(filename, 'wb')\n # # Pickle dictionary using protocol 0.\n pickle.dump(TP_matrixs, output)\n output.close()\n return dataset", "def data_preprocessing_TA(X):\n \n #Removing the mean and scaling the data\n X_prep=StandardScaler().fit_transform(X)\n #do here your preprocessing\n return X_prep", "def classical_preprocessing(*args, **kwargs):\r\n qnode.construct(args, kwargs)\r\n return qml.math.stack(qnode.qtape.get_parameters())", "def preprocess_valid_data(self):\r\n print(\"* Preprocessing validation data.\", flush=True)\r\n prep.create_HDF_file(self.C.validation_set)\r\n\r\n self.print_time_elapsed()", "def pre_process(cls, *args, **kwargs):\n pass", "def pre_process(cls, *args, **kwargs):\n pass", "def pre_process(cls, *args, **kwargs):\n pass", "def pre_process(cls, *args, **kwargs):\n pass", "def preprocess_data(self):\n\n selected_data = []\n selected_name = []\n\n stat_temp = self.get_activated_num()\n stat_temp = OrderedDict(sorted(six.iteritems(stat_temp), key=lambda x: x[0]))\n\n plot_interp = 'Nearest'\n\n if self.scaler_data is not None:\n if len(self.scaler_data[self.scaler_data == 0]) > 0:\n logger.warning('scaler data has zero values at {}'.format(np.where(self.scaler_data == 0)))\n self.scaler_data[self.scaler_data == 0] = np.mean(self.scaler_data[self.scaler_data != 0])\n logger.warning('Use mean value {} instead for those 
points'.format(np.mean(self.scaler_data)))\n\n if self.scale_opt == 'Linear':\n for i, (k, v) in enumerate(six.iteritems(stat_temp)):\n\n if self.scaler_data is not None:\n if k in self.name_not_scalable:\n data_dict = self.dict_to_plot[k]\n else:\n data_dict = self.dict_to_plot[k]/self.scaler_data\n\n else:\n data_dict = self.dict_to_plot[k]\n\n selected_data.append(data_dict)\n selected_name.append(k) #self.file_name+'_'+str(k)\n\n else:\n for i, (k, v) in enumerate(six.iteritems(stat_temp)):\n\n if self.scaler_data is not None:\n if k in self.name_not_scalable:\n data_dict = np.log(self.dict_to_plot[k])\n else:\n data_dict = np.log(self.dict_to_plot[k]/self.scaler_data*self.ic_norm)\n\n else:\n data_dict = np.log(self.dict_to_plot[k])\n\n selected_data.append(data_dict)\n selected_name.append(k)\n\n return selected_data, selected_name", "def _prepare_data(self):\n #TODO hardcoded values need to change\n print_info(\"Preprocessing the train data...\")\n self._place_dataset(os.path.join(self._hparams[\"temp-data\"], \"train\"),\n self.TRAIN_OUT_PATH)\n\n print_info(\"Preprocessing the test data...\")\n self._place_dataset(os.path.join(self._hparams[\"temp-data\"], \"test\"),\n self.TEST_OUT_PATH)\n\n print_info(\"Preprocessing the validation data...\")\n self._place_dataset(os.path.join(self._hparams[\"temp-data\"], \"val\"),\n self.VAL_OUT_PATH)", "def preprocess(data):\n # Data Preprocessing\n data['GDP_scaled']=preprocessing.scale(data['GDP'])\n data['CLPRB_scaled']=preprocessing.scale(data['CLPRB'])\n data['EMFDB_scaled']=preprocessing.scale(data['EMFDB'])\n data['ENPRP_scaled']=preprocessing.scale(data['ENPRP'])\n data['NGMPB_scaled']=preprocessing.scale(data['NGMPB'])\n data['PAPRB_scaled']=preprocessing.scale(data['PAPRB'])\n data['PCP_scaled']=preprocessing.scale(data['PCP'])\n data['ZNDX_scaled']=preprocessing.scale(data['ZNDX'])\n data['OP_scaled']=preprocessing.scale(data['Nominal Price'])\n data['OP2_scaled']=preprocessing.scale(data['Inflation Adjusted Price'])\n\n return data", "def _preprocess_data(df, use_preprocessdata=False, save_path=None):\n data = _load_data(df, use_preprocessdata, save_path)\n X = []\n X2 = []\n X3 = []\n X4 = []\n for i, (words, indexes) in enumerate(data):\n X.append(\n _vectorise_bag_of_pos_with_position(words, indexes, DEFAULT_WINDOW_SIZE,\n targets=[df['Pronoun'][i], df['A'][i], df['B'][i]]))\n X2.append(_vectorise_bag_of_pos_with_dependency(words, indexes))\n X3.append(_get_dependency_labels(words, indexes, targets=[df['Pronoun'][i], df['A'][i], df['B'][i]]))\n X4.append(_get_gpt2_likelihood(words, indexes))\n\n X5 = _bert_attentions(df, data)\n X5 = np.array(X5)\n\n X = np.array(X)\n X2 = np.array(X2)\n featur_len = int(X.shape[1] / 3)\n featur_len2 = int(X2.shape[1] / 3)\n X_pr = X[:, 0:featur_len]\n X_a = X[:, featur_len:featur_len*2]\n X_b = X[:, featur_len*2:featur_len*3]\n X2_pr = X2[:, 0:featur_len2]\n X2_a = X2[:, featur_len2:featur_len2*2]\n X2_b = X2[:, featur_len2*2:featur_len2*3]\n X = np.concatenate((\n X_pr - X_a,\n X_pr - X_b,\n X_pr * X_a,\n X_pr * X_b,\n X2_pr - X2_a,\n X2_pr - X2_b,\n X2_pr * X2_a,\n X2_pr * X2_b,\n X3,\n X5,\n (df['Pronoun-offset'] - df['A-offset']).values.reshape(len(X), 1),\n (df['Pronoun-offset'] - df['B-offset']).values.reshape(len(X), 1)\n ), axis=1)\n Y = _get_classify_labels(df)\n return X, Y", "def preprocess(self):\n raise RuntimeError(\"please implement this function!\")", "def add_start_end_label(self, data, type=1):\n new_utts = []\n if type == 1:\n for line in data:\n title = [\"<s>\"] + line[0] 
+ [\"</s>\"]\n context = [\"<s>\"] + line[1] + [\"</s>\"]\n target = [\"<s>\"] + line[2] + [\"</s>\"]\n new_utts.append([title, context, target])\n\n elif type == 2:\n for line in data:\n title = [\"<s>\"] + line[0] + [\"</s>\"]\n context = [\"<s>\"] + line[1] + [\"</s>\"]\n target = [\"<s>\"] + line[2] + [\"</s>\"]\n sentiment = line[3]\n new_utts.append([title, context, target, sentiment])\n\n elif type == 3:\n for line in data:\n new_utts.append([[\"<s>\"] + list(line) + [\"/s\"]])\n\n else:\n print(\"Invalid type in process function\")\n return\n\n return new_utts", "def preprocessing_pipeline(self):\n self.__multilabel_processing()\n self.__split_dataset()\n self.__save_datasets()", "def prepare_data(self):", "def __loadPreProcessedData(self):\n le = joblib.load(self.le_filename)\n X = np.loadtxt(self.X_filename, delimiter=',').astype(int)\n raw_y = np.loadtxt(self.y_filename, delimiter=',').astype(int)\n y = le.inverse_transform(raw_y)\n ##Initialize atrtribute for this class\n self.le, self.X, self.y = le, X, y", "def prepareData(self, *data):\n arguments = 8\n (self.X, self.X_name, self.Y, self.Y_name, self.alignment,\n self.model, self.annotations, self.args) = tuple(data[:arguments])\n \n self.width = self.args.beam_width\n self.mathType = self.args.mathType\n self.io_files = {\n 'input': self.args.intermediate_input_files,\n 'output': self.args.intermediate_output_files\n }\n self.repeat_width = self.args.repeat_width\n self.cons_count = self.args.cons_count\n self.posterior_processors = self.args.posterior_processors \n\n self.positionGenerator = \\\n list(AlignmentBeamGenerator(self.alignment, self.width))\n \n for i in range(len(self.model.states)):\n self.model.states[i].computeHints(self)\n\n return data[arguments:]", "def preprocess():\n # Load the data\n random.seed(77)\n X,y = make_classification(n_samples=500, n_features=30, n_informative=8, n_redundant=2, \n n_repeated=0, n_classes=3, n_clusters_per_class=2, weights=None, \n flip_y=0.01, class_sep=1.0, hypercube=True, shift=0.0, scale=1.0, \n shuffle=True, random_state=None)\n\n x_train, x_val, y_train, y_val = train_test_split(X, y, random_state=0, test_size=0.25)\n\n # Standardize the data\n scaler = StandardScaler()\n X_train = scaler.fit_transform(x_train)\n X_val = scaler.transform(x_val)\n\n \n return X_train,y_train,X_val,y_val", "def data_preprocessing():\n lineid_content = get_lineid_content()\n print('Read movie_lines.txt file complete...')\n convos = get_convos()\n print('Read movie_conversations.txt file complete...')\n print('Building dataset')\n get_data(lineid_content, convos)", "def get_tips(self):\n result = VGroup()\n if hasattr(self, \"tip\"):\n result.add(self.tip)\n if hasattr(self, \"start_tip\"):\n result.add(self.start_tip)\n return result", "def stage_two_preprocessing(data: pd.Series) -> pd.Series:\n # designed to be run after remove_contractions\n data_ = data.dropna()\n data_ = remove_punctuation(data_)\n data_ = numbers_to_words(data_)\n data_ = remove_stopwords(data_)\n return data_", "def preprocess(data,scale):\n ##log_transformation\n #data['log_sale_price'] = np.log(data['sale_price'])\n #data['log_lot_area'] = np.log(data['lot_area'])\n #data['house_age'] = data['year_sold']- data['year_built']\n \n y = data['stay']\n \n #sales['log_sale_price'] = np.log(sales['sale_price'])\n #sales['log_lot_area'] = np.log(sales['lot_area'])\n #sales['house_age'] = sales['year_sold']- sales['year_built']\n data_dummy = data.copy()\n \n #dummy coding\n data_scale = 
pd.get_dummies(data_dummy).drop(columns = ['stay'])\n\n \n #scale the value\n if scale == True:\n S = StandardScaler().fit(data_scale)\n data_scale = S.transform(data_scale)\n \n return y, data_scale", "def preprocess_data(extracted_data: List[Tuple[str, str]]) -> List[str]:\n return [f'__label__{data[0]} {clean_formatting(data[1])}' for data in extracted_data]", "def preprocess(self, X):\n X = X.copy()\n predictor_subset = self.predictor_subset.copy()\n if 'all' in predictor_subset:\n predictor_subset = add_all_predictors(predictor_subset, X.columns)\n \n use_temporal = 'temporal' in predictor_subset\n if use_temporal:\n X_temporal = get_temporal_predictors(\n X['TIMESTAMP_END']\n )\n predictor_subset.remove('temporal')\n\n X = X[predictor_subset]\n\n if use_temporal:\n X = pd.concat([X, X_temporal], axis=1)\n\n if 'WD' in predictor_subset:\n X = process_wind_direction_predictor(X)\n\n return X", "def preprocess_data(self, data, scale_data=True):\n print(\"preprocess_data not implemented\")\n return data", "def post_process(self, filename):\n title = self.title\n\n outfile = open(filename, 'r')\n data = outfile.readlines()\n\n name = data[0].strip()\n mode = data[1].strip()\n ops = data[2].strip().split(',')\n nl = 'True' in ops[0]\n ln = 'True' in ops[1]\n drv = 'True' in ops[2]\n\n data = data[3:]\n npt = len(data)\n\n t1u = np.empty((npt, ))\n t3u = np.empty((npt, ))\n t5u = np.empty((npt, ))\n flag = np.empty((npt, ), dtype=np.bool)\n x_dv = np.empty((npt, ))\n x_state = np.empty((npt, ))\n x_proc = np.empty((npt, ))\n\n for j, line in enumerate(data):\n x_dv[j], x_state[j], x_proc[j], flag[j], t1u[j], t3u[j], t5u[j] = line.strip().split(',')\n\n if np.any(flag):\n use_flag = True\n else:\n use_flag = False\n\n # Times are all normalized.\n t1 = t1u/t1u[0]\n t3 = t3u/t3u[0]\n t5 = t5u/t5u[0]\n\n if mode == 'state':\n x = x_state\n xlab = \"Number of states.\"\n elif mode == 'desvar':\n xlab = \"Number of design vars.\"\n x = x_dv\n elif mode == 'proc':\n x = x_proc\n xlab = \"Number of processors.\"\n\n if use_flag:\n\n flagtxt = self.flagtxt\n\n # Split them up. 
We know the pattern.\n t1F = t1[0::2]\n t1T = t1[1::2]\n t3F = t3[0::2]\n t3T = t3[1::2]\n t5F = t5[0::2]\n t5T = t5[1::2]\n\n xT = x[0::2]\n xF = x[1::2]\n\n # Generate plots\n\n if nl:\n plt.figure(1)\n plt.loglog(xF, t1F, 'bo-')\n plt.loglog(xT, t1T, 'ro-')\n\n plt.xlabel(xlab)\n plt.ylabel('Nonlinear Solve: Normalized Time')\n plt.title(title)\n plt.grid(True)\n if self.equal_axis:\n plt.axis('equal')\n plt.legend(['Default', flagtxt], loc=0)\n plt.savefig(\"%s_%s_%s.png\" % (name, mode, 'nl'))\n\n if ln:\n plt.figure(2)\n plt.loglog(xF, t3F, 'o-')\n plt.loglog(xT, t3T, 'ro-')\n\n plt.xlabel(xlab)\n plt.ylabel('Compute Totals: Normalized Time')\n plt.title(title)\n plt.grid(True)\n if self.equal_axis:\n plt.axis('equal')\n plt.legend(['Default', flagtxt], loc=0)\n plt.savefig(\"%s_%s_%s.png\" % (name, mode, 'ln'))\n\n if drv:\n plt.figure(3)\n plt.loglog(xF, t5F, 'o-')\n plt.loglog(xT, t5T, 'ro-')\n\n plt.xlabel(xlab)\n plt.ylabel(self.title_driver + ': Normalized Time')\n plt.title(title)\n plt.grid(True)\n if self.equal_axis:\n plt.axis('equal')\n plt.legend(['Default', flagtxt], loc=0)\n plt.savefig(\"%s_%s_%s.png\" % (name, mode, 'drv'))\n\n if self.special_plot_driver_on_linear:\n\n # Plot whatever driver does (e.g., coloring) on the same axis and normalization as linear time.\n t5 = t5u/t3u[0]\n t5F = t5[0::2]\n t5T = t5[1::2]\n\n plt.figure(4)\n plt.loglog(xF, t3F, 'o-')\n plt.loglog(xT, t3T, 'ro-')\n plt.loglog(xT, t5T, 'mo-')\n\n plt.xlabel(xlab)\n plt.ylabel('Normalized Time')\n plt.title(title)\n plt.grid(True)\n if self.equal_axis:\n plt.axis('equal')\n plt.legend(['Compute Totals', 'Compute Totals: ' + flagtxt, self.title_driver], loc=0)\n plt.savefig(\"%s_%s_%s.png\" % (name, mode, 'spec1'))\n\n else:\n\n # Generate plots\n\n if nl:\n plt.figure(1)\n plt.loglog(x, t1, 'o-')\n\n plt.xlabel(xlab)\n plt.ylabel('Nonlinear Solve: Normalized Time')\n plt.title(title)\n plt.grid(True)\n if self.equal_axis:\n plt.axis('equal')\n plt.savefig(\"%s_%s_%s.png\" % (name, mode, 'nl'))\n\n if ln:\n plt.figure(2)\n plt.loglog(x, t3, 'o-')\n\n plt.xlabel(xlab)\n plt.ylabel('Compute Totals: Normalized Time')\n plt.title(title)\n plt.grid(True)\n if self.equal_axis:\n plt.axis('equal')\n plt.savefig(\"%s_%s_%s.png\" % (name, mode, 'ln'))\n\n # For procs, we also view the time/proc as a function of number of procs.\n if mode == 'proc':\n plt.figure(3)\n plt.loglog(x, t3/x, 'o-')\n\n plt.xlabel(xlab)\n plt.ylabel('Compute Totals: Normalized Time per Processor')\n plt.title(title)\n plt.grid(True)\n if self.equal_axis:\n plt.axis('equal')\n plt.savefig(\"%s_%s_%s_per_proc.png\" % (name, mode, 'ln'))\n\n plt.show()\n print('done')", "def prepare(self):\n if self.pin.lower() == \"homo\":\n for i,line in enumerate(self.x):\n self.x[i] = [x - self.Lead_HOMOs_xval[i] for x in line]\n elif self.pin.lower() == \"lumo\":\n for i,line in enumerate(self.x):\n self.x[i] = [x - self.Lead_LUMOs_xval[i] for x in line]\n elif \"vac\" in self.pin.lower():\n for i,line in enumerate(self.x):\n self.x[i] = [x - self.vacuum[i] for x in line]\n elif \"ef\" in self.pin.lower():\n for i,line in enumerate(self.x):\n self.x[i] = [x - self.fermi_levels[i] for x in line]", "def preprocess(self, df):\n print(\"Started Processing....\")\n # binary conversion\n df.replace(to_replace=\"yes\", value=1, inplace=True)\n df.replace(to_replace=\"no\", value=0, inplace=True)\n\n # replace unknowns with nan\n df = df.replace(to_replace=\"unknown\", value=np.nan)\n # getting the list of columns with nan\n ml = 
df.columns[df.isna().any()].tolist()\n\n for item in ml:\n # getting the ratio of the index labels\n val = pd.DataFrame(df[item].value_counts(normalize=True))\n\n # index labels in a list\n valr = val.index.tolist()\n # drc.index = valr\n # columns values in a list\n valc = val[item].tolist()\n # replacing the nan values with ratio\n df[item] = df[item].fillna(pd.Series(np.random.choice(valr, p=valc, size=len(df))))\n\n # dependent variable\n dfy = df.iloc[:, -1]\n # independent variable\n dfx = df.iloc[:, :-1]\n\n # converting categorical data to numerical\n dfx = pd.get_dummies(dfx)\n\n # normalizing\n dfx = (dfx - dfx.min()) / (dfx.max() - dfx.min())\n\n dxdy = pd.concat([dfx, dfy], axis=1)\n\n # class balancing\n sm = RandomOverSampler(random_state=42)\n dfx, dfy = sm.fit_sample(dxdy.iloc[:, :-1], dxdy.iloc[:, -1])\n\n # converting to dataframe\n dfx = pd.DataFrame(dfx, columns=dxdy.iloc[:, :-1].columns.values)\n\n # dimensionality reduction\n pca = PCA(n_components=33)\n dfx = pca.fit_transform((dfx))\n\n print(\"Processing Done\")\n\n return dfx, dfy", "def precalculate():\n pass", "def precalculate():\n pass", "def stats_preprocessing(self):\n output = {'before_tot':[],\n 'before_unique':[],\n 'after_tot':[],\n 'after_unique':[]}\n for i in range(len(self.table)):\n description_raw = self.table.description.iloc[i].split(' ')\n clean_txt = self.table.clean_text.iloc[i].split(' ')\n\n output['before_tot'].append(len(description_raw))\n output['before_unique'].append(len(set(description_raw)))\n output['after_tot'].append(len(clean_txt))\n output['after_unique'].append(len(set(clean_txt)))\n \n print(\"\"\"Before preprocessing a description had on average {0} words with standard deviation {1}. \\n\nMoreover, the average of unique words was {2} and the standard deviation {3}.\"\"\"\\\n .format(round(mean(output['before_tot']), 2), round(stdev(output['before_tot']), 2), \n round(mean(output['before_unique']), 2), round(stdev(output['before_unique'])), 2))\n \n print(\"\"\"\\nAfter preprocessing a description has on average {0} words with standard deviation {1}. 
\\n \nThe average of unique words is now {2} and the standard deviation {3}.\"\"\"\\\n .format(round(mean(output['after_tot']), 2), round(stdev(output['after_tot']), 2), \n round(mean(output['after_unique']),2), round(stdev(output['after_unique']), 2)))\n\n return output", "def postprocessData(meta, units, data):\n\n data['time'] = np.arange(0, meta['dt'] * len(data), meta['dt'])\n units['time'] = 's'\n\n meta, units, data = self.calculateForce(meta, units, data)\n\n data['distance'] = np.sqrt(data.xDist**2 + data.yDist**2)\n units['distance'] = 'nm'\n\n return meta, units, data", "def _tidyBeforeRun (self):\n\t\tself._buildProps ()\n\t\tself._buildInput ()\n\t\tself._buildProcVars ()\n\t\tself._buildJobs ()", "def data_preprocessing(dat: pd.DataFrame, art='C', y=None, logger=None, remove=True):\n if logger == None:\n logging.basicConfig(\n level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n logger = logging.getLogger(__name__)\n \n logger.info('Start data preprocessing')\n # replace original indeices with default ones\n dat = dat.reset_index(drop=True)\n\n if art == 'C':\n logger.info('Start to label target feature y for classification task')\n dat.iloc[:, -1] = LabelEncoder().fit_transform(dat.iloc[:, -1])\n logger.info('End with label encoding the target feature')\n if remove:\n # remove columns with more than 1/2 na\n dat = dat.loc[:, dat.isna().sum()/len(dat) < .5]\n logger.info('Following features are removed from the dataframe because half of their value are NA: %s' %\n (dat.columns[dat.isna().sum()/len(dat) > .5].to_list()))\n # Encoding\n oe = OneHotEncoder(drop='first')\n # get categorical columns\n if y:\n dat_y = dat[[y]]\n cols = dat.columns.to_list()\n cols.remove(y)\n dat_x = dat[cols]\n else:\n dat_y = dat[[dat.columns[-1]]]\n dat_x = dat[dat.columns[:-1]]\n dat_categ = dat_x.select_dtypes(include=['object'])\n # get kterm of categ features\n for i in dat_categ.columns:\n # save output to dat\n tmp = dat_x[i].value_counts()\n dat_x[i + '_kterm'] = dat_x[i].map(lambda x: tmp[x] if x in tmp.index else 0)\n # float columns including the k term cols\n dat_numeric = dat_x.select_dtypes(include=['float32', 'float64', 'int32', 'int64'])\n # onehot encoding and label encoding\n dat_categ_onehot = dat_categ.iloc[:, dat_categ.apply(lambda x: len(x.unique())).values < 8]\n dat_categ_label = dat_categ.iloc[:, dat_categ.apply(lambda x: len(x.unique())).values >= 8]\n flag_onehot = False\n flag_label = False\n # oe\n if dat_categ_onehot.shape[1] > 0:\n logger.info('Start to do onehot to the following categoric features: %s' %\n (str(dat_categ_onehot.columns.to_list())))\n dat_onehot = pd.DataFrame(oe.fit_transform(dat_categ_onehot.astype(str)).toarray(),\n columns=oe.get_feature_names(dat_categ_onehot.columns))\n logger.info('End with onehot')\n flag_onehot = True\n else:\n dat_onehot = None\n # le\n if dat_categ_label.shape[1] > 0:\n logger.info('Start to do label encoding to the following categoric features: %s' %\n (str(dat_categ_label.columns.to_list())))\n dat_categ_label = dat_categ_label.fillna('NULL')\n dat_label = pd.DataFrame(columns=dat_categ_label.columns)\n for i in dat_categ_label.columns:\n dat_label[i] = LabelEncoder().fit_transform(dat_categ_label[i].astype(str))\n flag_label = True\n logger.info('End with label encoding')\n else:\n dat_label = None\n # scaling\n # combine\n dat_new = pd.DataFrame()\n if flag_onehot and flag_label:\n dat_new = pd.concat([dat_numeric, dat_onehot, dat_label], axis=1)\n elif flag_onehot:\n dat_new = 
pd.concat([dat_numeric, dat_onehot], axis=1)\n elif flag_label:\n dat_new = pd.concat([dat_numeric, dat_label], axis=1)\n else:\n dat_new = dat_numeric\n dat_new = pd.concat([dat_new, dat_y], axis=1)\n # imputation\n dat_new = dat_new.dropna(axis=1, how='all')\n if dat_new.isna().sum().sum() > 0:\n logger.info('Nan value exist, start to fill na with iterative imputer: ' +\n str(dat_new.isna().sum().sum()))\n # include na value, impute with iterative Imputer or simple imputer\n columns = dat_new.columns\n imp = IterativeImputer(max_iter=10, random_state=0)\n # imp = SimpleImputer(missing_values=np.nan, strategy='mean')\n dat_new = imp.fit_transform(dat_new)\n dat_new = pd.DataFrame(dat_new, columns=columns)\n dat_numeric = dat_new.iloc[:, :-1].select_dtypes(include=['float32', 'float64', 'int32', 'int64'])\n logger.info('End with fill nan')\n return dat_new, dat_numeric.columns", "def preprocess(self):\n\n if self.x_range == None:\n x_min = min(np.min(self.fx), np.min(self.gx))\n x_max = max(np.max(self.fx), np.max(self.gx))\n self.x_range = [x_min,x_max]\n\n f_inter = interpolate.interp1d(self.fx, self.fy, 'cubic', fill_value = 'extrapolate')\n g_inter = interpolate.interp1d(self.gx, self.gy, 'cubic', fill_value = 'extrapolate')\n fgx_new = np.linspace(self.x_range[0], self.x_range[1], self.N)\n fy_new = f_inter(fgx_new)\n gy_new = g_inter(fgx_new)\n\n self.fx, self.fy = fgx_new, fy_new\n self.gx, self.gy = fgx_new, gy_new", "def preprocessNode(self):\n pass", "def _preprocess_data_point(self, x: DataPoint) -> DataPoint:\n for preprocessor in self._pre:\n x = preprocessor(x)\n if x is None:\n raise ValueError(\"Preprocessor should not return None\")\n return x", "def preprocessing(self):\n # Standardizing series names\n self.raw.columns = ['stress', 'strain', 'e']\n # Removing percentage format to strain values\n if self.strainPercent:\n self.raw['strain'] = self.raw['strain'].divide(100)\n # On-table (initial) void ratio\n self.e_0 = self.raw['e'].iloc[0]\n return", "def transform(self, data):", "def set_initial_likelihoods(self):\n # get values as lists of [0, 1] or [1, 0]\n values = ([float(1 - i), float(i)] for i in self.data)\n\n # get range of tip idxs (0-ntips)\n keys = range(0, len(self.data))\n\n # map values to tips {0:x, 1:y, 2:z...}\n valuesdict = dict(zip(keys, values))\n\n # set as .likelihood attributes on tip nodes.\n self.tree = self.tree.set_node_values(\n feature=\"likelihood\", \n values=valuesdict,\n default=None,\n )\n\n logger.debug(f\"set tips values: {valuesdict}\")", "def _preprocess_values(self,Y):\r\n return Y", "def _preprocess_values(self,Y):\r\n return Y", "def pre_process_data(self, all_labels, all_data):\n\n # [1] Normalizes data\n all_data = self.pre_precess_manager.normalization(all_data)\n\n data_train, data_test, label_train, label_test = train_test_split(all_data, all_labels, test_size=0.1,\n shuffle=True)\n\n return data_train, data_test, label_train, label_test", "def _preprocessing(self):\n if self.resize:\n self.click_list = self._remapping_coord(self.click_list,\n self.input_size,\n self.orig_size)\n clickers = self._get_clickers(self.click_list)\n clicks_list = clickers.get_clicks()\n clicks_lists = self._points_transform([clicks_list], self.image_width)\n points_nd = self._get_points_nd(clicks_lists, self.net_clicks_limit)\n return points_nd", "def __prepro_testdata(dict_testdata):\n if not dict_testdata or check_df == False:\n test_corpus = list()\n # create connection to testdata and traindata input\n conn_test = 
connection_preparation.conn_testing()\n # load data for testdata\n dict_testdata = manage_dfs.get_df(conn_test)\n else:\n pass\n\n # PREPROCESSING TESTDATA\n for name, df in dict_testdata.items():\n logging.info(f'preprocessing for testdata table {name} started.')\n test_corpus = preprocessing.preprocess_data(df, step_key)\n dict_testdata_prepro[name] = test_corpus", "def inference_preprocess(self):\n return", "def preprocess_main():", "def preprocess(self):\n\n self._build_labels_dict(['one', 'two', 'three', 'four', 'five'])\n\n with open(self.data_path + self.file_name, 'rb') as csvfile:\n\n reader = csv.reader(csvfile, delimiter=\",\")\n for row in reader:\n self.texts.append(row[1])\n self.labels.append(self.labels_index[row[0]])\n\n print('Found %s texts.' % len(self.texts))", "def data_preparation(self) -> None:\n self.logger.info('data cleaning')\n self.logger.info('num of secs: {}, num of ipo_dates: {}, num of secs with prices: {}'.format(\n len(self.data),\n len(self.ipo_dates),\n len(self.prices)\n ))\n excluded = []\n excluded = [i.lower() for i in excluded]\n self.logger.info(f'number of excluded: {len(excluded)}')\n for i in excluded:\n self.data.pop(i)\n for s in self.data:\n # columns with empty assets sum (empty columns and other situations)\n self.data[s].dropna(axis='columns', how='any', subset=['A_0'], inplace=True)\n # columns with descriptions (polish and english names of values)\n self.data[s].drop(self.data[s].columns[[0, 1]], inplace=True, axis=1)\n\n self.logger.info(f'number of secs after cleaning: {len(self.data)}')\n data_list = [k for k in self.data.values()]\n self.uber_data = pd.concat(data_list, ignore_index=True, axis=1)\n self.uber_data = self.uber_data.transpose()\n self.uber_data = self.uber_data.loc[:, pd.notnull(self.uber_data.columns)]", "def get_preprocessed_seq_data(self, raw_data, cls):\n # Check that input data has unique ids\n self._check_unique_ids(raw_data)\n\n cls_id = int(self.class_name_to_class_id[cls])\n\n data_keys = ['gt_ids', 'tracker_ids', 'gt_dets', 'tracker_dets', 'similarity_scores']\n data = {key: [None] * raw_data['num_timesteps'] for key in data_keys}\n unique_gt_ids = []\n unique_tracker_ids = []\n num_gt_dets = 0\n num_tracker_dets = 0\n for t in range(raw_data['num_timesteps']):\n\n # Only extract relevant dets for this class for preproc and eval (cls)\n gt_class_mask = np.atleast_1d(raw_data['gt_classes'][t] == cls_id)\n gt_class_mask = gt_class_mask.astype(np.bool)\n gt_ids = raw_data['gt_ids'][t][gt_class_mask]\n gt_dets = [raw_data['gt_dets'][t][ind] for ind in range(len(gt_class_mask)) if gt_class_mask[ind]]\n\n tracker_class_mask = np.atleast_1d(raw_data['tracker_classes'][t] == cls_id)\n tracker_class_mask = tracker_class_mask.astype(np.bool)\n tracker_ids = raw_data['tracker_ids'][t][tracker_class_mask]\n tracker_dets = [raw_data['tracker_dets'][t][ind] for ind in range(len(tracker_class_mask)) if\n tracker_class_mask[ind]]\n similarity_scores = raw_data['similarity_scores'][t][gt_class_mask, :][:, tracker_class_mask]\n\n # Match tracker and gt dets (with hungarian algorithm)\n unmatched_indices = np.arange(tracker_ids.shape[0])\n if gt_ids.shape[0] > 0 and tracker_ids.shape[0] > 0:\n matching_scores = similarity_scores.copy()\n matching_scores[matching_scores < 0.5 - np.finfo('float').eps] = -10000\n match_rows, match_cols = linear_sum_assignment(-matching_scores)\n actually_matched_mask = matching_scores[match_rows, match_cols] > 0 + np.finfo('float').eps\n match_cols = match_cols[actually_matched_mask]\n\n 
unmatched_indices = np.delete(unmatched_indices, match_cols, axis=0)\n\n # For unmatched tracker dets, remove those that are greater than 50% within a crowd ignore region.\n unmatched_tracker_dets = [tracker_dets[i] for i in range(len(tracker_dets)) if i in unmatched_indices]\n ignore_region = raw_data['gt_ignore_region'][t]\n intersection_with_ignore_region = self._calculate_mask_ious(unmatched_tracker_dets, [ignore_region],\n is_encoded=True, do_ioa=True)\n is_within_ignore_region = np.any(intersection_with_ignore_region > 0.5 + np.finfo('float').eps, axis=1)\n\n # Apply preprocessing to remove unwanted tracker dets.\n to_remove_tracker = unmatched_indices[is_within_ignore_region]\n data['tracker_ids'][t] = np.delete(tracker_ids, to_remove_tracker, axis=0)\n data['tracker_dets'][t] = np.delete(tracker_dets, to_remove_tracker, axis=0)\n similarity_scores = np.delete(similarity_scores, to_remove_tracker, axis=1)\n\n # Keep all ground truth detections\n data['gt_ids'][t] = gt_ids\n data['gt_dets'][t] = gt_dets\n data['similarity_scores'][t] = similarity_scores\n\n unique_gt_ids += list(np.unique(data['gt_ids'][t]))\n unique_tracker_ids += list(np.unique(data['tracker_ids'][t]))\n num_tracker_dets += len(data['tracker_ids'][t])\n num_gt_dets += len(data['gt_ids'][t])\n\n # Re-label IDs such that there are no empty IDs\n if len(unique_gt_ids) > 0:\n unique_gt_ids = np.unique(unique_gt_ids)\n gt_id_map = np.nan * np.ones((np.max(unique_gt_ids) + 1))\n gt_id_map[unique_gt_ids] = np.arange(len(unique_gt_ids))\n for t in range(raw_data['num_timesteps']):\n if len(data['gt_ids'][t]) > 0:\n data['gt_ids'][t] = gt_id_map[data['gt_ids'][t]].astype(np.int)\n if len(unique_tracker_ids) > 0:\n unique_tracker_ids = np.unique(unique_tracker_ids)\n tracker_id_map = np.nan * np.ones((np.max(unique_tracker_ids) + 1))\n tracker_id_map[unique_tracker_ids] = np.arange(len(unique_tracker_ids))\n for t in range(raw_data['num_timesteps']):\n if len(data['tracker_ids'][t]) > 0:\n data['tracker_ids'][t] = tracker_id_map[data['tracker_ids'][t]].astype(np.int)\n\n # Record overview statistics.\n data['num_tracker_dets'] = num_tracker_dets\n data['num_gt_dets'] = num_gt_dets\n data['num_tracker_ids'] = len(unique_tracker_ids)\n data['num_gt_ids'] = len(unique_gt_ids)\n data['num_timesteps'] = raw_data['num_timesteps']\n data['seq'] = raw_data['seq']\n data['cls'] = cls\n\n # Ensure again that ids are unique per timestep after preproc.\n self._check_unique_ids(data, after_preproc=True)\n\n return data", "def _preprocess_values(self,Y):\n return Y", "def preprocess(self):\n\n mm_magcoord.add_aacgm_coordinates(self)\n mm_magcoord.add_quasi_dipole_coordinates(self)\n mm_sc.calculate_ecef_velocity(self)\n mm_sc.add_ram_pointing_sc_attitude_vectors(self)\n\n return", "def test_pyt_preprocess_train(self):\n # Second, check that the model will train\n defaults = parser_defaults.copy()\n defaults['datatype'] = 'train'\n defaults['pytorch_preprocess'] = True\n str_output, _, _ = testing_utils.train_model(defaults)\n self.assertTrue(\n solved_task(str_output),\n 'Teacher could not teach seq2seq with preprocessed obs, output: {}'\n .format(str_output)\n )", "def preprocess(self, instances, stats=None, **kwargs):\n pass", "def convert_data (data_taxi,density):\n \n n_trips = len(data_taxi)\n \n min_longitude = min(min(list(data_taxi.loc[:,'pickup_longitude'])),\n min(list(data_taxi.loc[:,'dropoff_longitude'])))\n max_longitude = max(max(list(data_taxi.loc[:,'pickup_longitude'])),\n max(list(data_taxi.loc[:,'dropoff_longitude'])))\n 
min_latitude = min(min(list(data_taxi.loc[:,'pickup_latitude'])),\n min(list(data_taxi.loc[:,'dropoff_latitude'])))\n max_latitude = max(max(list(data_taxi.loc[:,'pickup_latitude'])),\n max(list(data_taxi.loc[:,'dropoff_latitude'])))\n \n e_longitude = max_longitude - min_longitude\n \n e_latitude = max_latitude - min_latitude\n \n scale =np.sqrt( n_trips/( e_longitude* e_latitude * density) )\n\n taxis = []\n \n for i in range(n_trips):\n selected_taxi = data_taxi.iloc[i]\n departure = [int((selected_taxi.pickup_longitude - min_longitude) * scale),\n int((selected_taxi.pickup_latitude - min_latitude) * scale),\n ]\n \n arrival = [\n int((selected_taxi.dropoff_longitude - min_longitude) * scale),\n int((selected_taxi.dropoff_latitude - min_latitude) * scale)]\n \n taxis.append(taxi(departure,arrival,departure))\n return taxis,int(scale*(e_latitude))+1,int(scale*(e_longitude))+1", "def _preprocess(self, data, normalize=False) -> np.ndarray:\n \n preprocessor = StandardScaler() if not normalize else Normalizer()\n\n data = preprocessor.fit_transform(data)\n \n return data", "def preprocess_raw(self):\n pass", "def preprocess(data, to_drop=[]):\n \n columns = data.columns.to_list()\n \n # split data to numeric vs categorical\n numeric_features = data.select_dtypes(include=[\n 'int64', 'float64']).columns\n \n if len(to_drop) > 0:\n categorical_features = data.select_dtypes(include=[\n 'object']).drop(to_drop, axis=1).columns\n print(categorical_features)\n else: \n categorical_features = data.select_dtypes(include=[\n 'object']).columns\n \n categorical_transformer = Pipeline(steps=[\n ('imputer', SimpleImputer(strategy='most_frequent', fill_value='missing'))])\n \n numerical_transformer = Pipeline(steps=[\n ('imputer', SimpleImputer(strategy='median')),\n ('scaler', RobustScaler())\n ])\n # missing_values = np.nan\n \n# Bundle preprocessing for numerical and categorical data\n preprocessor = ColumnTransformer(\n transformers=[\n ('num', numerical_transformer, numeric_features),\n ('cat', categorical_transformer, categorical_features)\n ])\n\n my_pipeline = Pipeline(steps=[('preprocessor', preprocessor) ])\n \n for col in to_drop:\n columns.remove(col)\n print('Hello')\n \n trans_data = my_pipeline.fit_transform(data)\n return trans_data#pd.DataFrame(#, columns=columns) ", "def get_preprocess(self) -> Dict:\n raise NotImplementedError", "def pre_process_data():\n data_list, header_list = Parser.__parse_csv_data(Parser.training_data_file)\n table = pandas.DataFrame(data_list, columns=header_list)\n table.drop(['date', 'employee id'], axis=1, inplace=True)\n unique_categories = table['category'].unique()\n unique_expense_desc = table['expense description'].unique()\n unique_tax_name = table['tax name'].unique()\n\n column_index = {\n 'input': {},\n 'output': {}\n }\n\n column_index['input']['pre-tax amount'] = {\n 'column_index': 0,\n 'type': 'int'\n }\n\n column_index['input']['tax amount'] = {\n 'column_index': 1,\n 'type': 'int'\n }\n\n index = 2\n\n for i in range(len(unique_expense_desc)):\n column_index['input'][unique_expense_desc[i]] = {\n 'column_index': i + index,\n 'type': 'str'\n }\n\n index += len(unique_expense_desc)\n\n for i in range(len(unique_tax_name)):\n column_index['input'][unique_tax_name[i]] = {\n 'column_index': i + index,\n 'type': 'str'\n }\n\n for i in range(len(unique_categories)):\n column_index['output'][unique_categories[i]] = {'value': i}\n\n Parser.__save_column_index(column_index)", "def preprocess(data):\n\tprint(\"\\n--------Data 
preview--------\\n{0}\".format(data.head()))\n\t# transform datatime columns to four columns includes the year、month、day、hour\n\tdata['year'] = pd.DatetimeIndex(data['datetime']).year\n\tdata['month'] = pd.DatetimeIndex(data['datetime']).month\n\tdata['day'] = pd.DatetimeIndex(data['datetime']).day\n\tdata['hour'] = pd.DatetimeIndex(data['datetime']).hour\n\n\tdata[\"date\"] = data.datetime.apply(lambda x : x.split()[0])\n\tdata[\"weekday\"] = data.date.apply(lambda dateString :\n\t calendar.day_name[datetime.strptime(dateString,\"%Y-%m-%d\").weekday()])\n\t# after transformed delete the 'datatime' column\n\tdataDroped = data.drop(['datetime'], axis=1)\n\tprint(\"\\n-------\\nAfter preprocess(transform time display format to avoid object data type)\\n-------\")\n\treturn dataDroped", "def _data_preproc(self, X, y, X_test, y_test=None):\n \n X = np.array(X)\n y = np.array(y)\n X_test = np.array(X_test)\n y_test = np.array(y_test) \n\n # y need to be a column:\n if y.shape == y.flatten().shape:\n y = y.reshape(-1, 1)\n\n # Scale the data\n stda = StandardScaler()\n stda.fit(np.vstack([X, X_test]))\n\n X_test = stda.transform(X_test)\n X = stda.transform(X)\n\n # Stack target to X (train)\n X = np.column_stack((y, X))\n\n # Stack id to X_test\n #X_test = np.column_stack((ids, X_test))\n\n # Export to txt files (, del.)\n np.savetxt(self._train_file, X, delimiter=\",\", fmt='%.5f')\n np.savetxt(self._test_file, X_test, delimiter=\",\", fmt='%.5f')", "def unusedFromKDOTDataPreparation():", "def data_preprocessing(dataset):\r\n df = pd.read_csv(dataset)\r\n df.head()\r\n df.describe()\r\n df.isnull().sum()\r\n df= df.drop(['instant'], axis=1)\r\n df['dteday'] = pd.to_datetime(df['dteday'].apply(str) + ' ' + df['hr'].apply(str) + ':00:00')\r\n return df", "def stage_one_preprocessing(data: pd.Series) -> pd.Series:\n data_ = data.dropna()\n print('ascii')\n data_ = remove_non_ascii(data)\n print('lower')\n data_ = to_lowercase(data_)\n print('slash')\n data_ = underscore_and_slash_to_space(data_)\n print('ellipse')\n data_ = remove_ellipses(data_)\n print('white')\n data_ = shrink_whitespace(data_)\n #print('contracts')\n #data_ = remove_contractions(data_)\n return data_", "def training_data_preprocessing(raw_data, num_passed_rows=72):\r\n # some samples have errors\r\n raw_data = raw_data[num_passed_rows:].reset_index(drop=True) \r\n \r\n # get data output\r\n data_output = raw_data[['Submitby Date Time', 'Challenge Manager', 'Challenge Copilot', 'Posting Date Date', 'Track',\r\n 'Technology List', 'First Place Prize', 'Num Registrations', 'Total Prize']]\r\n data_output, extended_columns = class_binaryzation(data_output)\r\n \r\n # save extended columns to cache\r\n extended_columns_filepath = 'cache/extended_columns.pkl'\r\n with open(extended_columns_filepath, 'wb') as f:\r\n pickle.dump(extended_columns, f)\r\n\r\n num_date_columns_filepath = 'cache/num_date_columns.pkl'\r\n try:\r\n data_output = date_separation1(data_output) \r\n with open(num_date_columns_filepath, 'wb') as f:\r\n pickle.dump(6, f)\r\n\r\n except:\r\n data_output = date_separation2(data_output)\r\n with open(num_date_columns_filepath, 'wb') as f:\r\n pickle.dump(5, f)\r\n\r\n data_output = money_digitalization(data_output)\r\n data_output = get_date_in_days(data_output)\r\n data_output['Days from Posting to Submit'] = data_output['Submitby Date Time Days from 2016'] \\\r\n - data_output['Posting Date Date Days from 2016'] \r\n \r\n # get other output\r\n label_output = pd.DataFrame(columns=['Success'])\r\n success_output 
= pd.DataFrame(columns=data_output.columns)\r\n failure_output = pd.DataFrame(columns=data_output.columns)\r\n for i in range(len(raw_data)):\r\n if raw_data.loc[i, 'Num Submissions Passed Review'] >= 1:\r\n label_output.loc[i, 'Success'] = 1\r\n success_output.loc[len(success_output)] = data_output.loc[i]\r\n else:\r\n label_output.loc[i, 'Success'] = 0\r\n failure_output.loc[len(failure_output)] = data_output.loc[i]\r\n\r\n return data_output, label_output, success_output, failure_output, extended_columns", "def _construct(self, data):\n logging.info(\"overall constructing (enter)\")\n\n pre_construct_data = self._pre_construct(data)\n # Kickstart the seralizin'.\n\n #if it found no ids, then we can just use the pre construct data\n if any((len(ids) > 0 for label, ids in self.ids.iteritems())):\n self.data = collections.defaultdict(dict)\n\n\n for manip in self.manips:\n manip()\n\n logging.debug(\"constructing (enter)\")\n # extend the output using the collated data we've found\n data = self.construct(data)\n logging.debug(\"constructing (exit)\")\n\n logging.debug(\"overall constructing (exit)\")\n return data\n else:\n logging.debug(\"overall constructing (exit)\")\n return pre_construct_data", "def _pre_process(self, x):\n return x", "def process(self):\n self.extract()\n self.transform()\n self.load()", "def submit_tip(tip_data):\n\n text = tip_data['message']\n out = \"\"\n\n bot = SlackBot()\n response = bot.send_tip(**tip_data)\n\n try:\n if response.get(\"error_code\") == \"invalid_sender\":\n out = MESSAGES[\"get_started\"]\n elif response.get(\"error_code\") == \"duplicate_context_uid\":\n out = MESSAGES[\"duplicate\"]\n elif response.get(\"error_message\"):\n if response.get(\"error_code\") in [\"tip_limit\", \"wallet_error\", \"pocket_error\"]:\n out = \"This tip cannot be completed\"\n else:\n out = response.get(\"error_message\")\n elif response.get(\"state\") in [\"ok\", \"accepted\"]:\n tip = response[\"tip\"]\n\n if tip[\"status\"] == \"out for delivery\":\n out += MESSAGES[\"out_for_delivery\"].format(amount_display=tip[\"amount_display\"], receiver=tip[\"receiver\"])\n elif tip[\"status\"] == \"finished\":\n out += MESSAGES[\"finished\"].format(amount_display=tip[\"amount_display\"], receiver=tip[\"receiver\"], img_url=tip['meta'].get('tip_img_url', ''))\n\n out = append_image_response(text, out)\n\n if \"+debug\" in text:\n out += \"\\n```\\n%s\\n```\" % json.dumps(response, indent=2)\n\n except Exception as e:\n if \"+debug\" in text:\n return \"output formatting error with: {}\".format(e)\n\n return out", "def preprocess_(data, categories=None):\n res = []\n for x in data:\n if len(x['points']) < 3:\n continue\n if x['geomType'] == 3:\n x['base_curve'] = x['points'][:6]\n x['points'] = x['points'][6:]\n\n elif x['geomType'] == 9:\n x['base_curve'] = x['points'][:3]\n x['points'] = x['points'][3:]\n else:\n continue\n dst = {**x['attrs'], **x['attrd'], **x['attri']}\n x['attrs'] = dst\n x.pop('attrd')\n x.pop('attri')\n x['ogeomType'] = x['geomType']\n x['geomType'] = 8\n x['uid'] = int(x['attrs']['ElementID'])\n x['category'] = x['attrs']['Category']\n if categories is None or x['category'] in categories:\n rs = list(filter(None, geom.RevitInterface.factory(**x)))\n res.extend(rs)\n return res", "def _process(self):\n # choose the correct transform model before processing TI data\n self._select_transform()\n\n # process type first, fail early\n self._process_type()\n\n # process type specific data\n if isinstance(self.transform, GroupTransformModel):\n 
self._process_group()\n elif isinstance(self.transform, IndicatorTransformModel):\n self._process_indicator()\n\n # self.process_associations(self.transform.associations)\n self._process_associated_group(self.transform.associated_groups)\n self._process_attributes(self.transform.attributes or [])\n self._process_security_labels(self.transform.security_labels or [])\n self._process_tags(self.transform.tags or [])\n\n # date added\n self._process_metadata_datetime('dateAdded', self.transform.date_added)\n\n # last modified\n self._process_metadata_datetime('lastModified', self.transform.last_modified)\n\n # xid\n self._process_metadata('xid', self.transform.xid)", "def prep_data(self):\n\n self.fit_tokenizer(texts=self.texts)\n sequences = self.get_sequences(self.texts)\n self.text_data = pad_sequences(sequences, maxlen=self.MAX_SEQUENCE_LENGTH)\n\n self.labels = to_categorical(np.asarray(self.labels))\n print('Shape of data tensor:', self.text_data.shape)\n print('Shape of label tensor:', self.labels.shape)\n\n # split the data into a training set and a validation set\n indices = np.arange(self.text_data.shape[0])\n np.random.shuffle(indices)\n self.text_data = self.text_data[indices]\n self.labels = self.labels[indices]\n nb_validation_samples = int(self.VALIDATION_SPLIT * self.text_data.shape[0])\n\n x_train = self.text_data[:-nb_validation_samples]\n y_train = self.labels[:-nb_validation_samples]\n x_val = self.text_data[-nb_validation_samples:]\n y_val = self.labels[-nb_validation_samples:]\n\n return x_train,y_train, x_val, y_val", "def _preprocess_training_model(self, data):\n def _pre_process(raw_data):\n \"\"\" Pre-process raw data. \"\"\"\n pattern = re.compile(\n r\"((?<=')\\w\\d.*?(?=')|(?<=\\\")\\w\\d.*?(?=\\\")|[\\w\\d]+)\")\n words = re.findall(pattern, raw_data)\n return ' '.join(list(map(string_utils.snake_case_to_camel, words)))\n\n data_list = []\n # Preprocess the dataset with naming convention, etc.\n with Progress() as progress:\n preprocess_task = progress.add_task('Pre-processing dataset...',\n total=data.shape[0])\n for idx, row in data.iterrows():\n row_data = {}\n for column in ['text', 'key', 'value']:\n row_data[column] = _pre_process(row[column])\n data_list.append(row_data)\n progress.update(preprocess_task, advance=1)\n return pd.DataFrame(data=data_list)", "def get_tips(self, padding=False):\n tips = {'tips': [{'x': int(v[0]), 'y': int(v[1])}\n for v in self._graph.leaf_nodes(coord=True)]}\n \n # Fill the rest with None if the user wants\n if padding:\n remaining = (self.max_inst * self.max_tips) - len(tips['tips'])\n for _ in range(remaining):\n tips['tips'].append(None)\n \n return tips" ]
[ "0.6318788", "0.6080475", "0.60501426", "0.5997665", "0.58562475", "0.5852716", "0.5846063", "0.5846063", "0.5846063", "0.5781888", "0.56678724", "0.56678724", "0.56678724", "0.56678724", "0.56678724", "0.5593358", "0.55858773", "0.55671465", "0.55429596", "0.5542602", "0.5521993", "0.5512513", "0.5502827", "0.54544646", "0.5432253", "0.5396715", "0.5388178", "0.53628784", "0.53528464", "0.53528464", "0.53528464", "0.53528464", "0.5343425", "0.529332", "0.529125", "0.5291135", "0.5259695", "0.5228543", "0.52265114", "0.5218406", "0.52016145", "0.51951593", "0.5193772", "0.51910615", "0.5184686", "0.5178979", "0.51777095", "0.51724386", "0.5169896", "0.5132204", "0.5116306", "0.51151127", "0.51006037", "0.50969595", "0.50969595", "0.5093829", "0.50815696", "0.5081082", "0.50712043", "0.5069426", "0.5065376", "0.50637704", "0.5059675", "0.5052462", "0.5052136", "0.50473064", "0.50473064", "0.50461936", "0.50356734", "0.5024974", "0.502242", "0.5021934", "0.5016631", "0.5006197", "0.50027764", "0.49985653", "0.4995407", "0.49947286", "0.49853066", "0.49767572", "0.49635598", "0.49570557", "0.49541914", "0.4948388", "0.49479383", "0.4928759", "0.4922013", "0.49108925", "0.48990586", "0.48941532", "0.4892877", "0.48902154", "0.48869157", "0.48798874", "0.48716927", "0.48698157", "0.4867309", "0.4867063", "0.48660588", "0.48651433" ]
0.54621476
23
Generates a link allowing the data in a given pandas dataframe to be downloaded
def filedownload(df):
    csv = df.to_csv(index=False)
    b64 = base64.b64encode(csv.encode()).decode()  # some strings <-> bytes conversions necessary here
    href = f'<a href="data:file/csv;base64,{b64}" download="player.csv">Download csv file</a>'
    return href
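The snippet assumes an import of base64 and is meant to be rendered inside a Streamlit app via st.markdown with unsafe_allow_html=True, the same pattern several of the negatives below use. A minimal usage sketch, where the sample data and the app context are assumptions, not part of the record:

# Minimal usage sketch; sample data and Streamlit context are illustrative assumptions.
import base64

import pandas as pd
import streamlit as st

players = pd.DataFrame({"player": ["A", "B"], "score": [10, 20]})
st.markdown(filedownload(players), unsafe_allow_html=True)  # renders the clickable download link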
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_table_download_link(df, file_name):\n if 'embedding_average' in df.columns:\n df = df.drop(columns='embedding_average')\n # df = results_output.drop(columns='embedding_average')\n # csv = df.to_csv(index=False)\n # b64 = base64.b64encode(csv.encode()).decode() # some strings <-> bytes conversions necessary here\n # href = f'<a href=\"data:file/csv;base64,{encoded}\">Download Excel File</a> (right-click and save as &lt;some_name&gt;.csv)'\n # href = f'<a href=\"data:file/csv;base64,{b64}\">Download CSV File</a> (right-click and save as &lt;some_name&gt;.csv)'\n towrite = io.BytesIO()\n df.to_excel(towrite,index = False, encoding = 'UTF-8') # write to BytesIO buffer\n towrite.seek(0) # reset pointer\n encoded = base64.b64encode(towrite.read()).decode() # encoded object\n href = f'<a href=\"data:file/csv;base64,{encoded}\" download =\"{file_name}\">Download Excel File</a> (right-click and save as &lt;some_name&gt;.csv)'\n st.markdown(href, unsafe_allow_html=True)", "def get_table_download_link(df):\n csv = df.to_csv(index=False)\n b64 = base64.b64encode(csv.encode()).decode() # some strings <-> bytes conversions necessary here\n file_name = 'vaccination_locations_'+county\n file_name = file_name.replace(\" \", \"_\").replace(\",\", \"\")\n href = f'<a href=\"data:file/csv;base64,{b64}\" download={file_name}>Download Vaccination Site <br>Locations as CSV File</a>'\n return href", "def get_table_download_link(df):\n csv = df.to_csv(index=False)\n b64 = base64.b64encode(\n csv.encode()\n ).decode() # some strings <-> bytes conversions necessary here\n return f'<a href=\"data:file/csv;base64,{b64}\" download=\"yourownquery.csv\">Download Your very Own Query Searched csv file!!!</a>'", "def get_table_download_link(df):\r\n\tval = to_excel(df)\r\n\tb64 = base64.b64encode(val).decode() # val looks like b'...'\r\n\thref=f'<a href=\"data:application/octet-stream;base64,{b64}\" download=\"captura.xlsx\" target=\"_blank\">Descargar: Haga clic derecho y guardar enlace como...</a>' # decode b'abc' => abc\t\r\n\treturn href", "def get_table_download_link(df):\n csv = df.to_csv(index=True)\n # some strings <-> bytes conversions necessary here\n b64 = base64.b64encode(csv.encode()).decode()\n href = f'<a href=\"data:file/csv;base64,{b64}\">Download csv file</a>'\n return(href)", "def get_table_download_link(df,location):\n csv = df.to_csv(index=False)\n b64 = base64.b64encode(csv.encode()).decode() # some strings <-> bytes conversions necessary here\n href = f\"\"\"\n <link rel=\"stylesheet\" href=\"https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.7.0/css/font-awesome.min.css\">\n <a href=\"data:file/csv;base64,{b64}\" download=\"{location}.csv\" style='text-decoration: inherit;'>\n <button style=\"background-color: DodgerBlue;border: none;color: white;padding: 12px 30px;cursor: pointer;font-size: 20px; display: block; \n margin-left: auto; font-size:100%;\n margin-right: auto;\n width: 40%;\"><i class=\"fa fa-download\"></i> Download {location}</button>\n </a>\n \n \"\"\"\n return href", "def get_table_download_link(df):\n csv = df.to_csv(index=False)\n b64 = base64.b64encode(csv.encode()).decode() # some strings <-> bytes conversions necessary here\n href = f'<a href=\"data:file/csv;base64,{b64}\" download=\"results.csv\">Download csv file</a>'\n return href", "def get_table_download_link(df):\n csv = df.to_csv(index=False)\n b64 = base64.b64encode(\n csv.encode()\n ).decode() # some strings <-> bytes conversions necessary here\n return f'<a href=\"data:file/csv;base64,{b64}\" 
download=\"animalquery.csv\">Download csv file</a>'", "def get_table_download_link(df):\n csv = df.to_csv(index=False)\n b64 = base64.b64encode(csv.encode()).decode() # some strings <-> bytes conversions necessary here\n return f'<a href=\"data:file/csv;base64,{b64}\">Download csv file</a>'", "def get_table_download_link(df):\n csv = df.to_csv(index=False)\n # some strings <-> bytes conversions necessary here\n b64 = base64.b64encode(csv.encode()).decode()\n href = f'<a href=\"data:file/csv;base64,{b64}\">Right click and download csv file</a>'\n return href", "def get_table_download_link(df):\n csv = df.to_csv(index=False)\n # some strings <-> bytes conversions necessary here\n b64 = base64.b64encode(csv.encode()).decode()\n href = f'<a href=\"data:file/csv;base64,{b64}\" download=\"download.csv\">Download csv file</a>'\n return href", "def get_table_download_link(df):\n val = to_excel(df)\n b64 = base64.b64encode(val) # val looks like b'...'\n return f'<a href=\"data:application/octet-stream;base64,{b64.decode()}\" download=\"extract.xlsx\">Download csv file</a>' # decode b'abc' => abc", "def get_table_download_link(df):\n csv = df.to_csv(index=False)\n b64 = base64.b64encode(csv.encode()).decode() # some strings <-> bytes conversions necessary here\n return f'<a href=\"data:file/csv;base64,{b64}\" download=\"answers.csv\">Download answers.csv</a>'", "def get_table_download_link(df):\n csv = df.to_csv(index=False)\n b64 = base64.b64encode(\n csv.encode()\n ).decode() # some strings <-> bytes conversions necessary here\n return f'<a href=\"data:file/csv;base64,{b64}\" download=\"pattern.csv\">Download csv file</a>'", "def get_table_download_link(df):\n val = to_excel(df)\n b64 = base64.b64encode(val) # val looks like b'...'\n return f'<a href=\"data:application/octet-stream;base64,{b64.decode()}\" download=\"extract.xlsx\">Download csv file</a>' # decode b'abc' => abc", "def filedownload(df,filename): \n filename=filename + \".csv\"\n csv = df.to_csv(index=False)\n b64 = base64.b64encode(csv.encode()).decode() # strings <-> bytes conversions\n href = f'<a href=\"data:file/csv;base64,{b64}\" download=%s>Download %s File</a>'%(filename,filename)\n return href", "def get_table_download_link(df):\r\n\tval = to_excel(df)\r\n\tb64 = base64.b64encode(val) # val looks like b'...'\r\n\treturn f'<a href=\"data:application/octet-stream;base64,{b64.decode()}\" download=\"extract.xlsx\">Download xlsx file</a>' # decode b'abc' => abc\r", "def get_table_download_link(df):\n val = to_excel(df)\n b64 = base64.b64encode(val) # val looks like b'...'\n return f'<a href=\"data:application/octet-stream;base64,{b64.decode()}\" download=\"Your_File.xlsx\">Download Excel file</a>' # decode b'abc' => abc", "def get_table_download_link(df, filename='download', message='Download csv result file'):\n csv = df.to_csv(index=False)\n b64 = base64.b64encode(csv.encode()).decode() # some strings <-> bytes conversions necessary here\n href = f'<a href=\"data:file/csv;base64,{b64}\" download=\"{filename}.csv\" >{message}</a>'\n return href", "def download_link(object_to_download, download_filename, download_link_text):\n if isinstance(object_to_download,pd.DataFrame):\n object_to_download = object_to_download.to_csv(index=False)\n\n # some strings <-> bytes conversions necessary here\n b64 = base64.b64encode(object_to_download.encode()).decode()\n\n return f'<a href=\"data:file/csv;base64,{b64}\" download=\"{download_filename}\">{download_link_text}</a>'", "def download(df_shorter,folderName):\n os.mkdir(str(folderName))\n 
path = os.getcwd()+'\\\\'+str(folderName)+'\\\\'\n #add column with video link generated from IDs\n df_shorter['urls'] = df_shorter['id'].apply(lambda x: generateLinkFromId(x))\n vid_dl = []\n i = 1\n for url in df_shorter['urls']:\n if url != False:\n name = str(i)+'.mp4'\n vid_dl.append(wget.download(url,path+name))#retrun the path of the saved video\n i = i+1\n return vid_dl", "def download_link(object_to_download, download_filename, download_link_text):\n if isinstance(object_to_download, pd.DataFrame):\n object_to_download = object_to_download.to_csv(index=False, sep = ';')\n\n # some strings <-> bytes conversions necessary here\n b64 = base64.b64encode(object_to_download.encode(\"latin1\")).decode()\n\n return f'<a href=\"data:file/txt;base64,{b64}\" download=\"{download_filename}\">{download_link_text}</a>'", "def download_link(object_to_download, download_filename, download_link_text):\n if isinstance(object_to_download,pd.DataFrame):\n object_to_download = object_to_download.to_csv(index=False)\n\n # some strings <-> bytes conversions necessary here\n b64 = base64.b64encode(object_to_download.encode()).decode()\n\n return f'<a href=\"data:file/txt;base64,{b64}\" download=\"{download_filename}\">{download_link_text}</a>'", "def download_link(object_to_download, download_filename, download_link_text):\n if isinstance(object_to_download,pd.DataFrame):\n object_to_download = object_to_download.to_csv(index=False)\n\n # some strings <-> bytes conversions necessary here\n b64 = base64.b64encode(object_to_download.encode()).decode()\n\n return f'<a href=\"data:file/txt;base64,{b64}\" download=\"{download_filename}\">{download_link_text}</a>'", "def download_link(object_to_download, download_filename, download_link_text):\n if isinstance(object_to_download,pd.DataFrame):\n object_to_download = object_to_download.to_csv(index=False)\n\n # some strings <-> bytes conversions necessary here\n b64 = base64.b64encode(object_to_download.encode()).decode()\n\n return f'<a href=\"data:file/txt;base64,{b64}\" download=\"{download_filename}\">{download_link_text}</a>'", "def make_download_url(lender, metro):\n where = \"\"\n if lender:\n where = ''\n count = 0\n if type(lender) is QuerySet:\n for item in lender:\n query = '(agency_code=%s AND respondent_id=\"%s\" AND year=%s)'\n where += query % (item.institution.agency_id, item.institution.respondent_id, item.institution.year)\n count += 1\n if(count < len(lender)):\n where += \"OR\"\n else:\n query = '(agency_code=%s AND respondent_id=\"%s\" AND as_of_year=%s)'\n where += query % (lender.agency_id, lender.respondent_id, lender.year)\n if metro:\n divisions = [div.metdiv for div in\n Geo.objects.filter(\n geo_type=Geo.METDIV_TYPE, cbsa=metro.cbsa, year=metro.year\n ).order_by('cbsa')]\n if divisions:\n where += ' AND msamd IN (\"' + '\",\"'.join(divisions) + '\")'\n else: # no divisions, so just use the MSA\n where += ' AND msamd=\"' + metro.cbsa + '\"'\n\n query = urlencode({\n '$where': where,\n '$limit': 0\n })\n base_url = 'https://api.consumerfinance.gov/data/hmda/slice/'\n return base_url + 'hmda_lar.csv?' 
+ query", "def download_link(object_to_download, download_filename, download_link_text):\n if isinstance(object_to_download, pd.DataFrame):\n object_to_download = object_to_download.to_csv(index=False)\n\n # some strings <-> bytes conversions necessary here\n b64 = base64.b64encode(object_to_download.encode()).decode()\n\n return f'<a href=\"data:file/txt;base64,{b64}\" download=\"{download_filename}\">{download_link_text}</a>'", "def print_allen_url(df):\n display(HTML('<h4>Some url for your convenience</h4>'))\n display(HTML('<p>For Gene summary:</p>'))\n for gene in df['gene'].unique():\n url = df[df['gene'] == gene]['allen_gene_url'].iloc[0]\n text = f'Go to Allen\\'s {gene} summary page.'\n display(HTML(f'<a href=\"{url}\">{text}</a>'))\n display(HTML('<p>For high resolution ISH images viewer:</p>'))\n for _, row in df.iterrows():\n plane, gene, dataset_id, url = row[['plane', 'gene', 'section_data_set_id', 'allen_viewer_url']]\n text = f'Go to Allen\\'s Viewer for {plane} {gene} experiment (id={dataset_id}).'\n display(HTML(f'<a href=\"{url}\">{text}</a>'))\n display(HTML('<p>For 3D expression viewer '\n '(Need to install <a href=\"http://mouse.brain-map.org/static/brainexplorer\">'\n 'Allen Brain Explorer</a>):</p>'))\n for section_data_set_id in df['section_data_set_id'].unique():\n url = df[df['section_data_set_id'] == section_data_set_id]['allen_3d_grid_url'].iloc[0]\n gene = df[df['section_data_set_id'] == section_data_set_id]['gene'].iloc[0]\n text = f'Go to Brain Explorer for {gene} 3D expressions (id={section_data_set_id}).'\n display(HTML(f'<a href=\"{url}\">{text}</a>'))\n return", "def get_table_download_link(xlsx_file):\n \n data = open(xlsx_file, 'rb').read()\n base64_encoded = base64.b64encode(data).decode('UTF-8')\n # b64 = base64.b64encode(csv.encode()).decode() # some strings <-> bytes conversions necessary here\n href = f'<a href=\"data:file/xlsx;base64,{base64_encoded}\" download=\"converted.xlsx\">Download excel file</a>'\n return href", "def download_link(request, job_id, filename):\n template_values = remote_view_util.fill_job_values(request, job_id)\n template_values = remote_view_util.fill_template_values(request, **template_values)\n template_values = catalog_view_util.fill_template_values(request, **template_values)\n template_values['title'] = 'Download area'\n template_values['file_name'] = filename\n return render_to_response('catalog/download_link.html',\n template_values)", "def download_report():\n entities = get_names()\n save_csv(entities)", "def _download_to_df(url, table_name, year, month):\n # Insert the table_name, year and month into the url.\n url = url.format(table=table_name, year=year, month=str(month).zfill(2))\n # Download the file.\n r = requests.get(url)\n if r.status_code != 200:\n raise _MissingData((\"\"\"Requested data for table: {}, year: {}, month: {} \n not downloaded. Please check your internet connection. 
Also check\n http://nemweb.com.au/#mms-data-model, to see if your requested\n data is uploaded.\"\"\").format(table_name, year, month))\n # Convert the contents of the response into a zipfile object.\n zf = zipfile.ZipFile(io.BytesIO(r.content))\n # Get the name of the file inside the zip object, assuming only one file is zipped inside.\n file_name = zf.namelist()[0]\n # Read the file into a DataFrame.\n data = pd.read_csv(zf.open(file_name), skiprows=1)\n # Discard last row of DataFrame\n data = data[:-1]\n return data", "def download_data(base_url,\n lista_anni,\n lista_inquinanti):\n \n # Inizializziamo la lista dei df ognuno dei quali corrisponde ad un inquinante\n df_template = pd.DataFrame(columns=['jd','h','1','2','3','4','5','6','7','8','9','10','11','13','14','15','16','38','39','40',\n '41','45','47','48','49','55','56','57','60','83','84','85','86','87','Anno','Inquinante'])\n lista_df = [df_template]\n\t\n\t# Per ogni inquinante\n for chimico in lista_inquinanti:\n \t# Per ogni anno\n for anno in lista_anni:\n print('Retrieving {} for year {} from {}'.format(chimico, anno, compose_url(base_url, anno, chimico)))\n \n # Esegui la richiesta\n r = requests.get(compose_url(base_url, anno, chimico))\n\n # Crea il rispettivo dataframe\n df = write_response(r)\n print('{} rows'.format(len(df)))\n\t\t\t\n\t\t\t# Prendi la linea che corrisponde all'header del df\n columns_ = df.iloc[0].index[0]\n \n \"\"\" Individua i nomi delle colonne splittando la stringa che li contiene tutti\n ed escludendo lestringhe vuote ottenute tramite lo split\"\"\"\n clean_columns = [item.strip()\\\n for item in columns_.split(' ')\\\n if len(item)!=0]\n \n # aggiungo le colonne Anno e Inquinante\n columns = clean_columns + ['Anno', 'Inquinante']\n\t\t\t\n list_rows = []\n # Per ogni linea del df\n for line_idx in range(1, len(df)):\n \t\n # Come nel caso precedente splitto la linea per ottenere le diverse celle\n line = df.iloc[line_idx].values[0].strip().split(' ')\n \n # Quindi ottengo la lista delle celle della riga i-th\n raw_line = [item for item in line if len(item)!=0] \n \n # Aggiungiamo le colonne anno e inquinante\n list_rows += [raw_line + [anno, chimico]]\n\t\t\t\n\t\t\t# Definiamo il nuovo dataset \n df_idx = pd.DataFrame(list_rows, columns=columns)\n \n # Creiamo aggiungiamo alla lista di df da concatenare quello appena creato \n lista_df += [df_idx]\n\n\t# Facciamo la union dei df tenendo conto che le colonne possono essere diverse (concat con pandas)\n df_final = pd.concat(lista_df, ignore_index=False)\n\n # sostituisco i NaN e -999.0 con un valore vuoto\n df_final = df_final.fillna('')\n df_final = df_final.replace(to_replace='-999.0', value='')\n \n return df_final", "def dataframe(self, *args, **kwargs):\n\n try:\n return self.url.generator.dataframe(*args, **kwargs)\n except AttributeError:\n pass\n\n try:\n return self.url.dataframe(*args, **kwargs)\n except AttributeError:\n pass\n\n raise NotImplementedError(\"Url '{}' of type '{}' can't generate a dataframe \".format(self.url, type(self.url)))", "def download(self):\n\n with open(self.dataset_path) as dataset_file:\n dataset = json.load(dataset_file)\n\n path = \"\".join([POST_HIT_PATH, dataset[\"dataset\"][\"data_path\"]])\n if not os.path.exists(path):\n os.makedirs(path)\n\n protocole = dataset[\"dataset\"][\"protocole\"]\n\n download_links = []\n\n for resource in dataset[\"dataset\"][\"resources\"]:\n file_path = \"\".join([path, resource[\"filename\"]])\n\n #Check if the the download link has not been used before (One download 
link for all)\n if resource[\"download_link\"] not in download_links:\n \n print(\"DOWNLOADING : {}\".format(resource[\"filename\"]))\n f = urllib.request.urlopen(resource[\"download_link\"])\n data = f.read()\n with open(file_path, \"wb\") as donwload_file:\n donwload_file.write(data)\n\n download_links.append(resource[\"download_link\"])\n\n \n #Extract all files from the tar archives if necessary\n if tarfile.is_tarfile(file_path):\n tf = tarfile.open(file_path)\n tf.exractall()", "def download_link(self, obj):\n if obj.cwr:\n url = reverse(\n 'admin:music_publisher_cwrexport_change', args=(obj.id,))\n url += '?download=true'\n return mark_safe('<a href=\"{}\">Download</a>'.format(url))", "def download(self, verbose):\n\n # Download datasets\n if verbose:\n print(\"Retrieving datasets from COVID-19 Open Data by Google Cloud Platform https://github.com/GoogleCloudPlatform/covid-19-open-data\")\n # Index\n i_cols = [\"location_key\", \"country_name\", \"subregion1_name\", \"subregion2_name\", \"iso_3166_1_alpha_3\"]\n i_df = pd.read_csv(self.URL_I, usecols=i_cols)\n # Mobility\n m_df = pd.read_csv(self.URL_M)\n m_df = (m_df.set_index([\"date\", \"location_key\"]) + 100).reset_index()\n # Combine data\n df = m_df.merge(i_df, how=\"left\", on=\"location_key\")\n # Location (country/province)\n df = df.loc[df[\"subregion2_name\"].isna()]\n df[self.PROVINCE] = df[\"subregion1_name\"].fillna(self.UNKNOWN).apply(unidecode)\n df[\"country_name\"] = df[\"country_name\"].replace(\n {\n # CIV\n \"Ivory Coast\": \"Cote d'Ivoire\",\n }\n )\n return df", "def _download_data(link: str, data_path: str, idx: int, total_idx: int) -> None:\n if not os.path.exists(data_path):\n print(\"Downloading data to {}, file: {} / {}\".format(data_path, idx + 1, total_idx))\n wget.download(link, data_path, bar=bar_custom)", "def download():\n return response.download(request,db)", "def download():\n return response.download(request,db)", "def download():\n return response.download(request,db)", "def download():\n return response.download(request,db)", "def download():\n return response.download(request,db)", "def download():\n return response.download(request,db)", "def download_url(self, fname):\n if not fname in self.data:\n return ''\n url = '/'.join([\n self.context.absolute_url(),\n '@@download-file',\n self.id + ':' + fname\n ])\n return url", "def download(args):\n with_dataset(args, Dataset._download)", "def download_link(self, handle):\n return None", "def build_retrieve_url(\n ids, display, result=None, download=None, file=None, offset=None,\n length=None, subseq_range=None, expanded=False, header=False\n):\n url = baseUrl + \"data/view/\"\n url += ids\n check_display_option(display)\n url += \"&display=%s\" % (display)\n if result is not None:\n url += \"&result=%s\" % (result)\n if length is not None:\n check_length(length)\n url += \"&length=%s\" % (length)\n if offset is not None:\n url += \"&offset=%s\" % (offset)\n if subseq_range is not None:\n check_subseq_range(subseq_range)\n url += \"&range=%s\" % (subseq_range)\n url += \"&expanded=true\" if expanded else \"&expanded=false\"\n url += \"&header=true\" if header else \"&header=false\"\n if download is not None or file is not None:\n check_download_file_options(download, file)\n url += \"&download=%s\" % (download)\n return url", "def download_data_and_save():\n url = 'https://github.com/djay/covidthailand/wiki/combined.csv'\n s=requests.get(url).content\n global df\n global last_updated\n df=pd.read_csv(io.StringIO(s.decode('utf-8')), parse_dates= 
['Date'])\n df.to_parquet(file_name, compression='UNCOMPRESSED')\n df.to_csv('jaydata.csv')\n last_updated = df['Date'][df.index[-1]].strftime(\"%d %B %Y\")\n\n url = 'https://raw.githubusercontent.com/wiki/djay/covidthailand/vaccinations.csv'\n s=requests.get(url).content\n global vac_df\n vac_df=pd.read_csv(io.StringIO(s.decode('utf-8')), parse_dates= ['Date'])\n vac_df.to_parquet('vaccination.parquet', compression='UNCOMPRESSED')\n\n print(\"Data downloaded and saved successfully. Data up to \" + last_updated)", "def build_fullframe_url(row: Any) -> Union[str, None]:\n\n url: str\n if row.Obs.source[:4] == \"neat\":\n path: str\n if row.Obs.source[5:] == \"palomar\":\n path = 'tricam'\n else:\n # Maui GEODSS\n path = 'geodss'\n\n url = '{}/neat/{}/data/{}.fits'.format(\n ENV.CATCH_ARCHIVE_BASE_URL, path,\n '/'.join(row.Obs.productid.lower().split('_')))\n else:\n # full-frame only for NEAT\n return None\n\n return url", "def file_download_link(filename):\n location = f\"/{UPLOAD_DIRECTORY}/{filename}\"\n return html.A(filename, href=location)", "def download_dataset(url=DATASET_URL):\n df = pd.read_csv(url, index_col=0)\n \n # ディレクトリが無ければ,作成する\n if not os.path.isdir(BASE_DIR):\n os.makedirs(BASE_DIR)\n \n df.to_csv(LOCAL_FILE_NAME)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n return response.download(request, db)", "def download():\n\treturn response.download(request, db)", "def createLink(self, downloadUrl, title):\n newUrl = downloadUrl.replace(\"details\", \"download\") \n return self.url + '/' + newUrl", "def download(dataset_revision):\n return reverse('manageDatasets.download', 
'microsites.urls',\n kwargs={'dataset_id': str(dataset_revision['dataset_id']), 'slug': dataset_revision['slug']})", "def download(self, verbose):\n # Download datasets\n if verbose:\n print(\"Retrieving datasets from Our World In Data https://github.com/owid/covid-19-data/\")\n # Vaccinations\n v_rec_cols = [\n \"date\", \"location\", \"iso_code\", \"total_vaccinations\", \"people_vaccinated\", \"people_fully_vaccinated\"]\n v_rec_df = pd.read_csv(self.URL_V_REC, usecols=v_rec_cols)\n v_loc_df = pd.read_csv(self.URL_V_LOC, usecols=[\"location\", \"vaccines\"])\n v_df = v_rec_df.merge(v_loc_df, how=\"left\", on=\"location\")\n # Tests\n pcr_rec_cols = [\"ISO code\", \"Date\", \"Daily change in cumulative total\", \"Cumulative total\"]\n pcr_df = pd.read_csv(self.URL_P_REC, usecols=pcr_rec_cols)\n pcr_df = pcr_df.rename(columns={\"ISO code\": \"iso_code\", \"Date\": \"date\"})\n pcr_df[\"cumsum\"] = pcr_df.groupby(\"iso_code\")[\"Daily change in cumulative total\"].cumsum()\n pcr_df = pcr_df.assign(tests=lambda x: x[\"Cumulative total\"].fillna(x[\"cumsum\"]))\n # Combine data (vaccinations/tests)\n df = v_df.set_index([\"iso_code\", \"date\"])\n df = df.combine_first(pcr_df.set_index([\"iso_code\", \"date\"]).loc[:, [\"tests\"]])\n df = df.reset_index()\n # Location (country/province)\n df[\"location\"] = df[\"location\"].replace(\n {\n # COG\n \"Congo\": \"Republic of the Congo\",\n }\n )\n df = df.loc[~df[\"iso_code\"].str.contains(\"OWID_\")]\n df[\"location\"] = df.groupby(\"iso_code\")[\"location\"].bfill()\n df.loc[df[\"location\"] == df[\"iso_code\"], \"location\"] = None\n df.loc[df[\"location\"].isna(), \"location\"] = df.loc[df[\"location\"].isna(), \"iso_code\"].apply(\n lambda x: coco.convert(x, to=\"name_short\", not_found=None))\n df[self.PROVINCE] = self.UNKNOWN\n return df", "def download():\n if auth.has_membership(1):\n user = \"Admin\"\n elif auth.has_membership(2):\n user = \"Examiner\"\n elif auth.has_membership(3):\n user = \"student\"\n elif auth.has_membership(5):\n user = \"Managment\"\n\n db.activity_log.insert( Title_entry=\"Download assignment\", \n referance_id=auth.user.id,\n remarks=\"content downloaded by {}\".format(user))\n db.commit()\n return response.download(request, db)", "def existing_url(**kwargs):\n # Build the format dictionary\n url_base = \"/axapi/v3/export\"\n f_dict = {}\n\n return url_base.format(**f_dict)", "def file_download_link(filename):\n location = \"/download/{}\".format(urlquote(filename))\n return html.A(filename, href=location)", "def downloadFile()-> None:\n logging.info(f\"Downloading current data set {getTime()}\")\n with open(DATA_FILE,\"wb\") as f:\n f.write(get(\"https://covid.ourworldindata.org/data/owid-covid-data.csv\").text.encode())\n logging.info(f\"Finished Downloading current data set {getTime()}\")", "def download():\n \n browser.find_element_by_xpath('//*[@id=\"ctl00_contentPlaceHolder_divAllVariablesPerYear2012\"]/div[2]/div[2]/div[1]/a').click()", "def get_data(self, url):\n # Initialize the button that needs to be pressed to get download the data\n button = None\n # While this button is of type 'None' we reload the browser\n while button is None:\n try:\n # Navigate to the URL\n self.go_to_url(url)\n # Sleep the code by the defined time plus a random number of seconds between 0s and 2s. 
This should\n # reduce the likelihood that Google detects us as a scraper\n time.sleep(self.sleep + 2 * np.random.rand(1))\n # Try to find the button and click it\n line_chart = self.browser.find_element_by_css_selector(\n \"widget[type='fe_line_chart']\")\n button = line_chart.find_element_by_css_selector(\n '.widget-actions-item.export')\n button.click()\n except exceptions.NoSuchElementException:\n # If the button cannot be found, try again (load page, ...)\n pass\n # After downloading, wait again to allow the file to be downloaded\n time.sleep(self.sleep)\n # Load the data from the csv-file as pandas.DataFrame object\n data = pd.read_csv(self.filename, skiprows=1)\n # Set date as index:\n if 'Day' in data.columns:\n data.Day = pd.to_datetime(data.Day)\n data = data.set_index(\"Day\")\n frequency = 'Daily'\n elif 'Week' in data.columns:\n data.Week = pd.to_datetime(data.Week)\n data = data.set_index(\"Week\")\n frequency = 'Weekly'\n else:\n data.Month = pd.to_datetime(data.Month)\n data = data.set_index(\"Month\")\n frequency = 'Monthly'\n # Sleep again\n time.sleep(self.sleep)\n # Delete the file\n while os.path.exists(self.filename):\n try:\n os.remove(self.filename)\n except:\n pass\n return data, frequency", "def download(site=None,\n sdate=None,\n ndays=1,\n edate=None,\n f_df=None,\n force=False,\n verbose=True):\n\n # get file names\n if f_df is None: \n f_df = list_files(site, sdate, ndays=ndays, edate=edate) \n # download files\n for di, row in f_df.iterrows():\n # get file name and check\n # if it exists\n fn = os.path.join(row['dir'], row['fname'])\n if not os.path.exists(fn) or force:\n # if forcing download and file\n #exists remove file before\n #redownloading\n if os.path.exists(fn):\n os.remove(fn)\n try: \n wget.download(row['hdir']+row['fname'],out=row['dir'])\n except:\n print('HTTP file not found {0}'.format(row['fname']))\n elif verbose:\n print('File {0} exists use force=True to download'.format(row['fname']))", "def getData(constrain):\n\n dat_AGS = chunks(AGS, 100)\n for num, ags_c in enumerate(dat_AGS):\n to_download = DOWNLOAD_LINK.format(ags_id=ags_c, constrain=constrain)\n to_download = to_download.replace(\" \", \"\")\n download_name = \"../Data/Gemeinden/{}-{}.csv\".format(\n constrain, num)\n\n url.urlretrieve(to_download, filename=download_name)\n\n sleep(1) # be nice\n\n return(num)", "def download_dataset(self):\n raise NotImplementedError", "def download1():\n #t=request.vars.arg(0)\n response.flash=request\n #print request.wsgi.environ['HTTP_REFERER']\n #print 'yghklo=',request.args[0]\n a=db(db.Project.Project_File==request.args[0]).select(db.Project.ALL)\n #a=db(db.Project.id==38).select(db.Project.ALL)\n #if a == None:\n#\t print 'silent'\n # print 'a= aabhas download',a[0].no_of_download, a[0].Project_File\n # if a[0].no_of_download==None:\n#\t a[0].no_download=0\n db(db.Project.Project_File==a[0].Project_File).update(no_of_download=(a[0].no_of_download or 0)+1)\n print 'a.id=',a[0].id\n # print len(a),'\\n'\n #print \"\\n\\n\\n\\n\"\n return response.download(request, db)" ]
[ "0.7870279", "0.7813833", "0.770839", "0.7706421", "0.77046764", "0.7687905", "0.7659281", "0.7649181", "0.7642583", "0.76236206", "0.76104516", "0.7604595", "0.7596416", "0.7582703", "0.75675696", "0.7466063", "0.74483395", "0.73935926", "0.72894573", "0.68111026", "0.6701601", "0.6609721", "0.65733653", "0.65733653", "0.65733653", "0.654409", "0.65271467", "0.64464927", "0.64215755", "0.63549614", "0.62308955", "0.6140533", "0.5997186", "0.5965818", "0.5955749", "0.5948824", "0.59286404", "0.58662486", "0.58517957", "0.58517957", "0.58517957", "0.58517957", "0.58517957", "0.58517957", "0.581712", "0.581243", "0.5808143", "0.57992256", "0.57628125", "0.57537", "0.57518756", "0.57489926", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747581", "0.5747136", "0.57330626", "0.5732055", "0.57164997", "0.5712185", "0.5703195", "0.5682414", "0.5681725", "0.566696", "0.56534576", "0.56420434", "0.56334174", "0.5619417", "0.56025237" ]
0.7307365
18
Computes the x coordinate of the ball
def getBallX(state):
    region = state[93:189, 8:WIDTH-8, 0]
    nonzero_x_coords = region.nonzero()[1]
    if len(nonzero_x_coords) > 0:
        return nonzero_x_coords.mean()
    return -1
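getBallX crops a fixed pixel band out of the frame's red channel and averages the column indices of any lit pixels; WIDTH and the frame layout are defined outside the record. A hedged sketch of its behavior, assuming a 160-pixel-wide Atari-style frame and a single fake ball pixel:

# All values below are assumptions made for illustration only.
import numpy as np

WIDTH = 160  # assumed frame width in pixels
state = np.zeros((210, WIDTH, 3), dtype=np.uint8)
state[100, 80, 0] = 200  # plant a fake ball pixel inside the scanned band
print(getBallX(state))   # 72.0: column 80 minus the 8-pixel crop offset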
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_x(self):\n return self.position.x", "def get_x(self):\n\t\treturn self._collision_rect.x + 14", "def x(self):\r\n return self.position.x", "def get_x(self):\n return self.posX", "def get_pos_x(self):\n return self.__pos_x", "def Getxcoord(self):\n return self.x_coord", "def x(self):\n return self._turtle.xcor()", "def x(self):\n return self._turtle.xcor()", "def get_x_position(self):\n return self.rect.x", "def getXCoordinate(self) -> float:\n return self.x_coord", "def x(self):\n if self._x is None:\n self.compute_coordinates()\n return self._x", "def x(self):\n return self.coords[0]", "def get_x(self):\n return self.coords[0]", "def get_x(self) -> int:\n return self.__x", "def bounce_x(self):\n result = super(GPolygon, self).bounce_x()\n self.move_inc(self.move.x, self.move.y)\n return result", "def x(self):\n return _libsbml.Point_x(self)", "def x_coord(self):\n\n return self.x0 + np.arange(self.nx) * self.dx", "def get_x_position(self):\n return self.actual_coordinates[0]", "def border_box_x(self):\n return self.position_x + self.margin_left", "def getBallPos(self) -> (int,int):\n return self.x, self.y", "def x(self):\n return self._coords[0]", "def find_ball(grid):\n for y in range(Y_ROWS):\n for x in range(X_COLS):\n if grid[x][y] == 4:\n ball_x = x\n\n return ball_x", "def x(self):\n return np.sum(self.bbox, 0)[0] / 2", "def reflect_x(self):\n r_x = self.x\n r_y = self.y *-1\n\n return (Point(r_x,r_y))", "def get_alien_x(self):\n return self.x", "def get_lx(self):\r\n return int(self.dx * self.nx - self.ox)", "def __get_x__(self):\n return self.Direction['x']", "def get_ship_x(self):\n return self.x", "def centerx(self):\n return self.left + self.width / 2", "def x(self) -> int:\n return self.data.x_centre >> 4", "def pos_x(self, *args, **kwargs) -> Any:\n pass", "def getPaddleX(state):\n region = state[190:191, 8:WIDTH-8, 0]\n nonzero_x_coords = region.nonzero()[1]\n assert len(nonzero_x_coords) > 0\n return nonzero_x_coords.mean()", "def reflect_x(self):\n\n return Point(self.x, - self.y)", "def getX(self):\n return self.x", "def x(self) -> int:\n return self._x", "def x(self):\n self._sort_measurements()\n return self._distances*np.cos(self._angles)", "def GetX(self):\r\n\r\n return self._x", "def getX(self):\n return self.position.getX()", "def getX(self):\r\n\t\treturn self._x", "def getX(self):\n return self.__x", "def anchor_x(self):\n return self._anchor_x", "def calculate_position_x(cls, position_x, direction):\n cls.raise_exception_if_invalid_direction(direction)\n return position_x + cls.DIRECTION_MAPPER[direction][0]", "def getX(self):\n return _libsbml.BoundingBox_getX(self)", "def getXPoint(self, x):\n # Find the correct parameter\n t = (x - self.p0.x) / self.d.x\n return self.point(t)", "def x(self):\n return self.x", "def setBallPos(self, x: int, y: int):\n self.x = x\n self.y = y", "def get_virtual_x_position(self):\n x_real = (\n (self.get_x_position() - self.get_origin_x_position()) *\n sin(self.get_origin_direction() * pi / 180)\n )\n y_real = (\n (self.get_y_position() - self.get_origin_y_position()) *\n cos(self.get_origin_direction() * pi / 180)\n )\n return abs(x_real + y_real)", "def collision(self,x):\n return (1.092*x - 171)", "def x ( self ) :\n return self.xvar", "def getPosition(self):\n return self.x", "def origin_x(self):\n return self._origin[0]", "def x_halo(self): \n return self.coords_halo[0]", "def getVelX(self):\n return self.posvel.getX()", "def vel_x(self, *args, **kwargs) -> Any:\n pass", "def x(self):\n return self._x", "def 
x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def update_ball(self):\n\t\tself.ball_x += self.velocity_x\n\t\tself.ball_y += self.velocity_y\n\t\tif self.ball_y < 0:\n\t\t\tself.ball_y = -self.ball_y\n\t\t\tself.velocity_y = -self.velocity_y\n\t\tif self.ball_y > 1:\n\t\t\tself.ball_y = 2 - self.ball_y\n\t\t\tself.velocity_y = -self.velocity_y\n\t\tif self.ball_x < 0:\n\t\t\tself.ball_x = -self.ball_x\n\t\t\tself.velocity_x = -self.velocity_x\n\t\tif self.ball_x < 1:\n\t\t\treturn 0\n\t\tif self.ball_y > self.paddle_y + State.paddle_height or self.ball_y < self.paddle_y:\n\t\t\treturn -1\n\t\tself.ball_x = 2 - self.ball_x\n\t\tself.velocity_x = random.uniform(-0.015, 0.015) - self.velocity_x\n\t\tif abs(self.velocity_x) < 0.03:\n\t\t\tself.velocity_x = 0.03 if self.velocity_x > 0 else -0.03\n\t\tself.velocity_y = random.uniform(-0.03, 0.03) - self.velocity_y\n\t\tself.velocity_x = max(min(self.velocity_x, 1.0), -1.0)\n\t\tself.velocity_y = max(min(self.velocity_y, 1.0), -1.0)\n\t\treturn 1", "def pointcenter(x):\n return point(x)", "def displacement_in_x(FinalPositionInX,ReleaseAngle,ShotDepth):\n\tFootLength = 0.152*Height\n\tBallRadius = 11.9\n\tBallDisplacementInX = BallRadius*cos(ReleaseAngle)\n\tHoopRadius = 22.9\n\tChangeInX = ShotDepth + FootLength - FinalPositionInX - BallDisplacementInX - HoopRadius\n\treturn(ChangeInX/100.)", "def getXVelocity(self):\n return self.xvelocity", "def get_origin_x_position(self):\n return self.origin_coordinates[0]", "def get_xpos(self, body_index):\n xpos_x_func = self.wrapper.get_xpos_x\n xpos_y_func = self.wrapper.get_xpos_y\n xpos_x_func.restype = ctypes.c_double\n xpos_y_func.restype = ctypes.c_double\n xpos_x = xpos_x_func(self.instance, body_index)\n xpos_y = xpos_y_func(self.instance, body_index)\n\n return xpos_x, xpos_y", "def get_axis_x(self):\r\n return self.__x_axis", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def getX(self):\n return self.position[0]", "def findX(self):\n return self.x", "def points_at_x(self, x):\n x, = modp(self.p, x)\n rhs = x ** 3 + x * self.a + self.b\n y = rhs.sqrt()\n return point.xy(int(x), int(y)), point.xy(int(x), -int(y))", "def get_stage_x(self):\n raise NotImplementedError", "def set_x(self, state_value):\n val = state_value / self.space_subdivisions + self.unit\n epsilon = 1e-6\n if not self.unit <= val <= 1.0 - self.unit + epsilon:\n raise AttributeError(\"Value out of bounds\")\n self.pos_x = val", "def content_box_x(self):\n return self.position_x + self.margin_left + self.padding_left + \\\n self.border_left_width", "def x_origin(self):\n return self._x_origin", "def get_hoop_location(ball_loc_x):\n if ball_loc_x > 47:\n hoop_loc_x = 88.65\n hoop_loc_y = 25\n else:\n hoop_loc_x = 5.35\n hoop_loc_y = 25\n\n return [hoop_loc_x, hoop_loc_y]", 
"def get_speed_x(self):\n return self.__speed_x", "def x(self):\n return (self.__x)", "def y_x(self, x: datetime) -> float:\n return self.point_1_price + self.slope * ((x - self.point_1_moment).total_seconds())", "def getAngVelX(self):\n return self.angvel.getX()" ]
[ "0.7201727", "0.7096933", "0.69620883", "0.69616777", "0.68398833", "0.6825444", "0.68012977", "0.68012977", "0.67890507", "0.67753965", "0.67356485", "0.6712596", "0.67017496", "0.6700496", "0.6699452", "0.6689949", "0.66789895", "0.6650196", "0.6637467", "0.6616907", "0.6615562", "0.6540288", "0.6524539", "0.64988893", "0.6465511", "0.64271134", "0.6426292", "0.64038295", "0.638587", "0.63665366", "0.63570434", "0.62802833", "0.6252275", "0.6248809", "0.6210072", "0.62066656", "0.6136288", "0.6117572", "0.611706", "0.6111804", "0.6108751", "0.610721", "0.60969955", "0.609439", "0.6086491", "0.60776657", "0.607622", "0.60645103", "0.6053267", "0.6049939", "0.6048753", "0.60362923", "0.6035088", "0.6015157", "0.59718853", "0.59718853", "0.59718853", "0.59718853", "0.59718853", "0.59718853", "0.59718853", "0.59718853", "0.59718853", "0.59718853", "0.59718853", "0.59718853", "0.59718853", "0.59718853", "0.59718853", "0.59718853", "0.5948857", "0.5947395", "0.5943209", "0.59391975", "0.593471", "0.59347004", "0.5914713", "0.59078276", "0.59078276", "0.59078276", "0.59078276", "0.59078276", "0.59078276", "0.59078276", "0.59078276", "0.59078276", "0.59078276", "0.59078276", "0.59015346", "0.58772886", "0.5828364", "0.58159083", "0.57990694", "0.5763222", "0.57499146", "0.5748686", "0.574681", "0.5740909", "0.57389927", "0.5738441" ]
0.7251717
0
Computes the x coordinate of the paddle.
def getPaddleX(state):
    region = state[190:191, 8:WIDTH-8, 0]
    nonzero_x_coords = region.nonzero()[1]
    assert len(nonzero_x_coords) > 0
    return nonzero_x_coords.mean()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_x(self):\n return self.position.x", "def get_x(self):\n return self.posX", "def x(self):\r\n return self.position.x", "def get_pos_x(self):\n return self.__pos_x", "def get_x_position(self):\n return self.rect.x", "def get_alien_x(self):\n return self.x", "def get_x(self):\n\t\treturn self._collision_rect.x + 14", "def Getxcoord(self):\n return self.x_coord", "def get_x(self) -> int:\n return self.__x", "def getXCoordinate(self) -> float:\n return self.x_coord", "def get_ship_x(self):\n return self.x", "def x_coord(self):\n\n return self.x0 + np.arange(self.nx) * self.dx", "def pos_x(self, *args, **kwargs) -> Any:\n pass", "def get_x_position(self):\n return self.actual_coordinates[0]", "def find_paddle(grid):\n for x in range(X_COLS):\n if grid[x][CURSOR_ROW] == 3:\n paddle_x = x\n\n return paddle_x", "def x(self):\n if self._x is None:\n self.compute_coordinates()\n return self._x", "def x(self):\n return _libsbml.Point_x(self)", "def get_x(self):\n return self.coords[0]", "def border_box_x(self):\n return self.position_x + self.margin_left", "def x(self):\n return self.coords[0]", "def paddle_reset_position(self, mouse):\n if (0 + self.paddle.width / 2) <= mouse.x <= (self.window.width - self.paddle.width / 2):\n self.paddle_x = mouse.x - self.paddle.width / 2\n self.window.add(self.paddle, self.paddle_x, self.paddle_y)", "def __get_x__(self):\n return self.Direction['x']", "def getX(self):\n return self.position.getX()", "def x(self):\n return self._coords[0]", "def x(self) -> int:\n return self._x", "def x(self) -> int:\n return self.data.x_centre >> 4", "def centerx(self):\n return self.left + self.width / 2", "def get_lx(self):\r\n return int(self.dx * self.nx - self.ox)", "def getX(self):\n return self.x", "def __init__(self):\n self.center = Point()\n #x coordinate is set in these amount of pixels to leave a slight gap between the screen and paddle just like in real pong video games\n self.center.x = SCREEN_WIDTH - 10\n #when game starts, paddle is placed on the middle of screen's right edge\n self.center.y = SCREEN_HEIGHT / 2", "def paddle_moving(self, mouse):\n # when the paddle is in the window\n if 0 + self.paddle.width/2 <= mouse.x <= self.window.width - self.paddle.width/2:\n self.paddle.x = mouse.x - self.paddle.width / 2\n\n # when the paddle is about to leave the left side of the window\n elif mouse.x < 0 + self.paddle.width/2:\n self.paddle.x = 0\n\n # when the paddle is about to leave the right side of the window\n elif mouse.x > self.window.width - self.paddle.width/2:\n self.paddle.x = self.window.width - self.paddle.width\n\n # the paddle's y coordinate will always be at the same as below\n self.paddle.y = self.window.height - self.paddle_offset", "def padding_box_x(self):\n return self.position_x + self.margin_left + self.border_left_width", "def reflect_x(self):\n\n return Point(self.x, - self.y)", "def getXPoint(self, x):\n # Find the correct parameter\n t = (x - self.p0.x) / self.d.x\n return self.point(t)", "def pos_left(self, x=1):\n\n self.x -= x\n return self.pos(self.x, self.y)", "def content_box_x(self):\n return self.position_x + self.margin_left + self.padding_left + \\\n self.border_left_width", "def x(self):\n return self._turtle.xcor()", "def x(self):\n return self._turtle.xcor()", "def GetX(self):\r\n\r\n return self._x", "def getX(self):\r\n\t\treturn self._x", "def handle_pygame_event(self, event):\n if event.type != MOUSEMOTION:\n # nothing to do\n return\n self.model.paddle.x = event.pos[0]-self.model.paddle.width/2.0", "def 
getX(self):\n return self.__x", "def reflect_x(self):\n r_x = self.x\n r_y = self.y *-1\n\n return (Point(r_x,r_y))", "def x ( self ) :\n return self.xvar", "def getPosition(self):\n return self.x", "def getBallX(state):\n region = state[93:189, 8:WIDTH-8, 0]\n nonzero_x_coords = region.nonzero()[1]\n if len(nonzero_x_coords) > 0:\n return nonzero_x_coords.mean()\n return -1", "def getX(self):\n return self.position[0]", "def center_horizontal_paddle(self):\n self.top_center = self.screen_rect.centerx - (self.screen_rect.centerx/2)\n self.bot_center = self.screen_rect.centerx - (self.screen_rect.centerx/2)", "def offset_x(self, x: int):\n self.tk_ref.geometry(f'{self.width}x{self.height}+{x}+{self.offset_y}')", "def set_x(self, state_value):\n val = state_value / self.space_subdivisions + self.unit\n epsilon = 1e-6\n if not self.unit <= val <= 1.0 - self.unit + epsilon:\n raise AttributeError(\"Value out of bounds\")\n self.pos_x = val", "def pupil_left_coords(self):\n if self.pupils_located:\n x = self.eye_left.origin[0] + self.eye_left.pupil.x + self.x_add\n y = self.eye_left.origin[1] + self.eye_left.pupil.y + self.y_add\n return (x, y)", "def getVelX(self):\n return self.posvel.getX()", "def get_stage_x(self):\n raise NotImplementedError", "def calculate_position_x(cls, position_x, direction):\n cls.raise_exception_if_invalid_direction(direction)\n return position_x + cls.DIRECTION_MAPPER[direction][0]", "def GetNewTrackX(self):\n x = 0\n for i in range(len(self.trackFrames)):\n #self.trackFrames[i].move(x, 0)\n x += self.trackFrames[i].width()\n return x", "def increase_x(self, state_amount):\n pos_amount = state_amount / self.space_subdivisions\n self.pos_x = (self.pos_x + pos_amount) % 1.0", "def x(self):\n return self.x", "def bounce_x(self):\n result = super(GPolygon, self).bounce_x()\n self.move_inc(self.move.x, self.move.y)\n return result", "def anchor_x(self):\n return self._anchor_x", "def offset_x(self) -> int:\n self.tk_ref.update()\n return self.tk_ref.winfo_x()", "def left_distance(self):\n return self.x", "def getBallPos(self) -> (int,int):\n return self.x, self.y", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self):\n return self._x", "def x(self, value: int):\n if not (0 < value < SCREEN_WIDTH - self.width):\n self.dir_x = -self.dir_x\n self._x += abs(self._x - value) * self.dir_x", "def x(self):\n return np.sum(self.bbox, 0)[0] / 2", "def draw_horizontal_paddle(self):\n pygame.draw.rect(self.screen, self.color, self.top_rect)\n pygame.draw.rect(self.screen, self.color, self.bot_rect)", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def x(self):\n return self.__x", "def __set_paddle_position(self):\n self.__window.remove(self.__paddle)\n self.__window.add(self.__paddle, (self.__window.width - self.__paddle.width) / 2,\n 
self.__window.height - self.__paddle_offset)", "def xAt(self, col):\n\n return self.bottomBoard.x + self.bottomBoard.xAt(col)", "def __init__(self, x, y):\r\n super(paddle, self).__init__(image=paddle.paddle2, x=x, y=y)\r\n self.points=games.Text(value=0, size=50, color=color.white, top=5, right=games.screen.width-5)\r\n games.screen.add(self.points)", "def get_axis_x(self):\r\n return self.__x_axis", "def origin_x(self):\n return self._origin[0]", "def unit_x(cls):\n return cls(1, 0, 0)", "def unit_x(cls):\n return cls(1, 0)", "def getX(self):\n return self.components[0]" ]
[ "0.7278048", "0.71396947", "0.70126045", "0.6951281", "0.69439006", "0.6811559", "0.6754752", "0.6747178", "0.674097", "0.6727758", "0.6699524", "0.6672045", "0.6648935", "0.66291857", "0.6610564", "0.6605478", "0.65597486", "0.65584433", "0.6536112", "0.6401617", "0.6386279", "0.63645905", "0.63285595", "0.63133645", "0.63115716", "0.63006896", "0.62842196", "0.62591314", "0.6196046", "0.6194936", "0.6173153", "0.61719793", "0.6154424", "0.615262", "0.6140157", "0.61204296", "0.60861015", "0.60861015", "0.60856014", "0.60779333", "0.6065898", "0.60632175", "0.60603565", "0.6055482", "0.60114855", "0.59799", "0.59712327", "0.59612495", "0.5954498", "0.5941701", "0.5912816", "0.5909119", "0.59089434", "0.5905913", "0.5904752", "0.5900098", "0.5897761", "0.58501637", "0.5849686", "0.5845103", "0.58333904", "0.5823636", "0.58078027", "0.58078027", "0.58078027", "0.58078027", "0.58078027", "0.58078027", "0.58078027", "0.58078027", "0.58078027", "0.58078027", "0.58078027", "0.58078027", "0.58078027", "0.58078027", "0.58078027", "0.58078027", "0.58060384", "0.57848454", "0.5772661", "0.57549745", "0.57549745", "0.57549745", "0.57549745", "0.57549745", "0.57549745", "0.57549745", "0.57549745", "0.57549745", "0.57549745", "0.57549745", "0.5740007", "0.5735078", "0.5734967", "0.573276", "0.57288533", "0.57208765", "0.57118237", "0.5709314" ]
0.7158066
1
Maps state to an action. Move the paddle to be under the ball.
def getAction(self, state):
    ball_x = getBallX(state)
    paddle_x = getPaddleX(state)
    if ball_x == -1:
        # If the ball is not seen, move the paddle toward the middle.
        target_x = (WIDTH - 16) // 2
    else:
        target_x = ball_x
    if target_x < paddle_x:
        action = LEFT
    else:
        action = RIGHT
    if DEBUG:
        print("ball_x =", ball_x)
        print("paddle_x =", paddle_x)
        print("target_x =", target_x)
        print("action =", action)
        input()  # pause so the debug output can be read
    return action
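Below is a minimal, self-contained sketch of the same follow-the-ball rule, handy for testing the decision logic without a game loop. The WIDTH, LEFT, and RIGHT values here are illustrative placeholders, not the module's real constants.

# Hypothetical constants; the original module defines its own WIDTH/LEFT/RIGHT.
WIDTH = 160
LEFT, RIGHT = 3, 2


def follow_ball(ball_x, paddle_x):
    # Aim for the screen centre when the ball is not visible (ball_x == -1).
    target_x = (WIDTH - 16) // 2 if ball_x == -1 else ball_x
    return LEFT if target_x < paddle_x else RIGHT


assert follow_ball(-1, 120) == LEFT    # no ball: paddle right of centre, move left
assert follow_ball(130, 40) == RIGHT   # ball to the right of the paddle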
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _move_our_paddle(self, action) -> None:\n if not isinstance(action, int):\n action = action.item() # pops the item if the action is a single tensor\n assert action in [a for a in self.action_meanings.keys()], f\"{action} is not a valid action\"\n if action == self.actions['UP']:\n if self.paddle_r.top_bound < self.top_bound:\n self.paddle_r.up()\n elif action == self.actions['DOWN']:\n if self.paddle_r.bottom_bound > self.bottom_bound:\n self.paddle_r.down()", "def _movePaddle(self):\n self._click()\n self._game.updatePaddle(self._touch)\n self._last = self._touch", "def goToBall(state):\n return goTo(state, state.ball_pos)", "def decide_move(self, action):\n x1, y1 = action['xy1']\n x2, y2 = action['xy2']\n self.__state.push(action)", "def _activate(self):\n # Increase the speed of the ball(s) slightly now the player has the\n # advantage of a wider paddle.\n self.game.paddle.transition(WideState(self.game.paddle))\n for ball in self.game.balls:\n ball.base_speed += 1", "def move(self, action): # Good\n if action == 0:\n dx, dy = 0, 1\n elif action == 1:\n dx, dy = 1, 0\n elif action == 2:\n dx, dy = 0, -1\n elif action == 3:\n dx, dy = -1, 0\n else:\n dx, dy = 0, 0\n\n # Check for max speed\n if ((self.vel_x + dx)**2 + (self.vel_y + dy)**2) \\\n <= self.max_speed_sq:\n self.x_vel += dx\n self.y_vel += dy\n\n self.prev_pos = self.center\n super(Player, self).move()", "def move(self, action: Action) -> State:\n new_state = State(self.size_h, self.size_v, self.wall_squares, self.boxes, self.storage_locations,\n self.current_location, action)\n\n if action == Action.DOWN:\n down_loc = (new_state.current_location[0] + 1, new_state.current_location[1])\n two_away = (down_loc[0] + 1, down_loc[1])\n new_state.current_location = down_loc\n if down_loc in new_state.boxes:\n new_state.boxes.remove(down_loc)\n new_state.boxes.append(two_away)\n\n elif action == Action.UP:\n up_loc = (new_state.current_location[0] - 1, new_state.current_location[1])\n two_away = (up_loc[0] - 1, up_loc[1])\n new_state.current_location = up_loc\n if up_loc in new_state.boxes:\n new_state.boxes.remove(up_loc)\n new_state.boxes.append(two_away)\n\n elif action == Action.RIGHT:\n right_loc = (new_state.current_location[0], new_state.current_location[1] + 1)\n two_away = (right_loc[0], right_loc[1] + 1)\n new_state.current_location = right_loc\n if right_loc in new_state.boxes:\n new_state.boxes.remove(right_loc)\n new_state.boxes.append(two_away)\n\n elif action == Action.LEFT:\n left_loc = (new_state.current_location[0], new_state.current_location[1] - 1)\n two_away = (left_loc[0], left_loc[1] - 1)\n new_state.current_location = left_loc\n if left_loc in new_state.boxes:\n new_state.boxes.remove(left_loc)\n new_state.boxes.append(two_away)\n\n new_state._validate() # TODO: Remove me for the final product.\n return new_state", "def step(self, p1_action, p2_action):\r\n p1_r = self.paddle_1.update(p1_action)\r\n p2_r = self.paddle_2.update(p2_action)\r\n \r\n p1_state, p2_state, p1_reward, p2_reward, done = self.ball.update(self.paddle_1, self.paddle_2)\r\n\r\n return p1_state, p2_state, (p1_reward+p1_r), (p2_reward+p2_r), done", "def _step_their_paddle(self):\n if random.random() < self.their_update_probability:\n if self.paddle_l.y < self.ball.y:\n if self.paddle_l.top_bound < self.top_bound:\n self.paddle_l.up()\n else:\n if self.paddle_l.bottom_bound > self.bottom_bound:\n self.paddle_l.down()", "def _active(self):\n self._soundhelper()\n k = self._game.getPlayerLives()\n self._movePaddle()\n 
self._game.moveBall(self._sound)\n if self._game.getPlayerLives() == 0:\n self._state = STATE_COMPLETE\n elif self._game.getPlayerLives() < k:\n self._state = STATE_PAUSED", "def apply_action(self, action):\n robot_state = self.get_state('turtlebot3_waffle_pi','world')\n robot_x = robot_state.pose.position.x\n robot_y = robot_state.pose.position.y\n # Set the distance moved in an action such that it is at least as large as the\n # minimum distance that would let a robot in the middle of the goal go to either side\n #self.move_dist = max(((C.GOAL_TOP + C.GOAL_BOTTOM) / 2) / C.NUM_POS_SENDS, 0.5)\n if action == Learn.MOVE_LEFT:\n print(\"Move left\")\n self.set_robot(robot_x, robot_y+self.move_dist)\n elif action == Learn.MOVE_RIGHT:\n print(\"Move right\")\n self.set_robot(robot_x, robot_y-self.move_dist)\n else:\n print(\"Stay put\")", "def hit_paddle(self):\n pass\n\n #Implement if collision with paddle is detected\n\n #Add randomness to how ball direction will change and return value", "def move(self, state):\n raise NotImplementedError(\"Need to implement this method\")", "def ball_move(self):\n self.ball.move(self.__dx, self.__dy)", "def move(self, action):\n \n self.counter += 1\n\n if action not in self.ACTIONS:\n raise Exception(\"Invalid action\")\n\n \n\n d_x, d_y = self.MOVEMENTS[action]\n x, y = self.position\n new_x, new_y = x + d_x, y + d_y\n new_X,new_Y=self.position_to_xy(new_x, new_y)\n \n\n if (new_x, new_y) not in self.cases:\n return self._get_state(), -3, False, self.ACTIONS\n \n \n \n elif (self.openGoal(new_x,new_y))&(new_X>-400):\n self.position = new_x, new_y\n self.positionxy = self.position_to_xy(new_x, new_y)\n \n return self._get_state(), 20, True, self.ACTIONS\n \n # elif not self.openGoal(new_x,new_y):\n # self.position = new_x, new_y\n # self.positionxy = self.position_to_xy(new_x, new_y)\n # return self._get_state(), -1, False, self.ACTIONS\n \n elif self.counter > 100:\n self.position = new_x, new_y\n self.positionxy = self.position_to_xy(new_x, new_y)\n return self._get_state(), -1, True, self.ACTIONS\n \n else:\n self.position = new_x, new_y\n self.positionxy = self.position_to_xy(new_x, new_y)\n return self._get_state(), -1, False, self.ACTIONS", "def _moveBolt(self,input,view,dt):\n if input.is_key_down('up') and self._isPlayerBolt()==True:\n if self._ship != None:\n self._createPlayerBolt(input,dt)\n velocity = BOLT_SPEED\n self._velocity = velocity", "def handle_pygame_event(self, event):\n if event.type != MOUSEMOTION:\n # nothing to do\n return\n self.model.paddle.x = event.pos[0]-self.model.paddle.width/2.0", "def action(ch,level):\r\n # quit the game\r\n if ch == 'q':\r\n myPlayer.GameOver(QUIT)\r\n \r\n # move the paddle right\r\n if ch == 'd':\r\n myPaddle.moveRight(myGrid.getGrid())\r\n \r\n if level==LEVELS:\r\n myBoss.moveRight(myGrid.getGrid(),balls)\r\n \r\n # move the paddle left\r\n elif ch == 'a':\r\n myPaddle.moveLeft(myGrid.getGrid())\r\n \r\n if level==LEVELS:\r\n myBoss.moveLeft(myGrid.getGrid(),balls)\r\n \r\n # release the ball, if on paddle\r\n if ch == ' ':\r\n for ball in balls:\r\n if ball.isOnPaddle():\r\n ball.release(myPaddle)\r\n \r\n # skip to the next level\r\n if ch == 'x':\r\n if level<LEVELS:\r\n return True\r\n elif level==LEVELS:\r\n myPlayer.GameOver(QUIT)\r\n return False", "def move_up(self):\n #if user moves paddle right on top of screen, they won't be able to move it more upwards by using this if statement\n #SCREEN_HEIGHT - 20 = Exact number of pixels where paddle can stop exactly on top edge but still has its 
body fully shown\n if self.center.y < SCREEN_HEIGHT - 20:\n self.center.y += MOVE_AMOUNT", "def move(self, action):\n \n currentState = self.state\n\n if action == \"up\":\n newState = (self.state[0] - 1, self.state[1])\n elif action == \"down\":\n newState = (self.state[0] + 1, self.state[1])\n elif action == \"right\":\n newState = (self.state[0], self.state[1] + 1)\n elif action == \"left\":\n newState = (self.state[0], self.state[1] - 1)\n else:\n raise NameError(action, 'This is not a valid action!')\n\n # Need to check if the new state is a legal move\n if (newState[0] >= 0) & (newState[0] <= 1) & (newState[1] >= 0) & (newState[1] <= 2):\n return newState\n else:\n print('This move takes you off the board, you have not moved!')\n return currentState", "def updatePaddle(self, selfinput):\n assert isinstance(selfinput,GInput)\n position = 0\n \n if selfinput.is_key_down('right'):\n position = 5\n if selfinput.is_key_down('left'):\n position = -5\n \n self._paddle.move(position)", "def act(self, action):\n\n self.get_frame(int(self.t))\n\n self.position = np.zeros((self.grid_size, self.grid_size))\n\n self.position[0:2,:]= -1\n self.position[:,0:2] = -1\n self.position[-2:, :] = -1\n self.position[:, -2:] = -1\n\n self.position[self.x, self.y] = 1\n if action == 0:\n if self.x == self.grid_size-3:\n self.x = self.x-1\n else:\n self.x = self.x + 1\n elif action == 1:\n if self.x == 2:\n self.x = self.x+1\n else:\n self.x = self.x-1\n elif action == 2:\n if self.y == self.grid_size - 3:\n self.y = self.y - 1\n else:\n self.y = self.y + 1\n elif action == 3:\n if self.y == 2:\n self.y = self.y + 1\n else:\n self.y = self.y - 1\n else:\n RuntimeError('Error: action not recognized')\n\n self.t = self.t + 1\n reward = self.board[self.x, self.y]\n \n self.hunter_move()\n removals = []\n for i in range(len(self.h_x)):\n if self.x == self.h_x[i] and self.y == self.h_y[i]:\n reward -= 100\n removals.append(i)\n \n for i in sorted(removals, reverse=True):\n del self.h_x[i]\n del self.h_y[i]\n \n self.board[self.x, self.y] = 0\n self.board_with_hunters[:,:] = 0\n \n for i in range(len(self.h_x)):\n self.board_with_hunters[self.h_x[i],self.h_y[i]] = -100\n \n self.trajectory[self.x,self.y] = 1\n game_over = self.t > self.max_time\n state = np.concatenate((self.board.reshape(self.grid_size, self.grid_size,1),\n self.position.reshape(self.grid_size, self.grid_size,1),\n self.trajectory.reshape(self.grid_size, self.grid_size,1),\n self.board_with_hunters.reshape(self.grid_size, self.grid_size,1)),axis=2)\n state = state[self.x-2:self.x+3,self.y-2:self.y+3,:]\n\n return state, reward, game_over", "def move_ball():\n print(\"Current position: ({},{}). \"\n \"Direction: ({},{}). 
Value: {}\".format(shared.ball_yy, shared.ball_xx,\n shared.direction[0], shared.direction[1],\n map_data[shared.ball_yy][shared.ball_xx]))\n if does_apply_direction():\n shared.ball_yy += shared.direction[0]\n shared.ball_xx += shared.direction[1]\n else:\n pass\n # shared.ball_yy = shared.ball_yy + shared.direction[0] \\\n # if default_positions.get(collision)[0] == None else default_positions.get(collision)[0]\n # shared.ball_xx = shared.ball_xx + shared.direction[1] \\\n # if default_positions.get(collision)[1] == None else default_positions.get(collision)[1]", "def tick():\n move_balls(targets_speed)\n move_super_balls(targets_speed * 2)", "def __move_ball(self):\n while not self.__game_is_over():\n self.__ball.move(self.__dx, self.__dy)\n self.__handle_wall_collision()\n if self.__num_lives == 0:\n self.__game_over_picture()\n break\n elif self.__bricks_total == 0:\n self.__game_over_picture('You Win!!')\n break\n pause(FRAME_RATE)", "def _move_pillman(self, action):\n action += 1 # our code is 1 based\n pos = self.world_state['pillman']['pos']\n pillman = self.world_state['pillman']\n update_2d_pos(self.map, pos, action, pos)\n if self.world_state['food'][pos[0]][pos[1]] == 1:\n self._get_food(pos[0], pos[1])\n for i, pill in enumerate(self.world_state['pills']):\n pos = pill['pos']\n if pos[0] == pillman['pos'][0] and pos[1] == pillman['pos'][1]:\n self._get_pill(i)\n break", "def move_ball(self):\r\n self.canvas.move(self.ball, (self.x_speed * self.speed), (self.y_speed * self.speed))\r\n (leftPos, topPos, rightPos, bottomPos) = self.canvas.coords(self.ball)\r\n if leftPos <= 0 or rightPos >= 400:\r\n self.x_speed = -self.x_speed\r\n if topPos <= 0 or bottomPos >= 400:\r\n self.y_speed = -self.y_speed", "def primitive_action(game, action):\n if action == \"north\":\n game.move(NORTH)\n elif action == \"south\":\n game.move(SOUTH)\n elif action == \"east\":\n game.move(EAST)\n elif action == \"west\":\n game.move(WEST)\n elif action == \"chop\":\n game.chop()\n elif action == \"harvest\":\n game.harvest()\n elif action == \"deposit\":\n game.deposit()\n\n return game", "def move_ball(self, from_point, to_point):\n color = self.grid.cells[from_point].ball_color\n self.grid.cells[to_point].place_ball(color)\n self.grid.cells[from_point].button.get_child().destroy()\n self.grid.cells[from_point].is_ball = False\n self.grid.cells[from_point].ball_color = None\n # sprawdzamy czy jest 5 kul w danej orientacji\n self.grid.check_balls()\n # sprawdzamy czy uzytkownik nie zapelnił całej planszy\n self.if_player_lose()\n # losujemy i ustawiamy kolejne kule\n self.grid.place_balls(BALLS_PER_CLICK)\n # sprawdzamy czy jest 5 kul w danej orientacji\n self.grid.check_balls()", "def doAction(self, gameState, action):\n self.lastState = gameState\n self.lastAction = action", "def handle_state(self):\r\n if self.state == 'walk':\r\n self.walking()\r\n elif self.state == 'fall':\r\n self.falling()\r\n elif self.state == 'jumped on':\r\n self.jumped_on()\r\n elif self.state == 'shell slide':\r\n self.shell_sliding()\r\n elif self.state == 'death jump':\r\n self.death_jumping()", "def paddle_moving(self, mouse):\n # when the paddle is in the window\n if 0 + self.paddle.width/2 <= mouse.x <= self.window.width - self.paddle.width/2:\n self.paddle.x = mouse.x - self.paddle.width / 2\n\n # when the paddle is about to leave the left side of the window\n elif mouse.x < 0 + self.paddle.width/2:\n self.paddle.x = 0\n\n # when the paddle is about to leave the right side of the window\n elif mouse.x > 
self.window.width - self.paddle.width/2:\n self.paddle.x = self.window.width - self.paddle.width\n\n # the paddle's y coordinate will always be at the same as below\n self.paddle.y = self.window.height - self.paddle_offset", "def result(self, state, action):\n \"*** YOUR CODE HERE ***\"\n new_x, new_y, new_heading = state\n if action == 'Forward':\n if state[2] == 0: new_y = state[1] + 1 #Forward North\n if state[2] == 1: new_x = state[0] - 1 #Forward West\n if state[2] == 2: new_y = state[1] - 1 #Forward South\n if state[2] == 3: new_x = state[0] + 1 #Forward East \n elif action == 'TurnLeft':\n if state[2] == 0: new_heading = 1 #Turn left to face West\n if state[2] == 1: new_heading = 2 #Turn left to face South \n if state[2] == 2: new_heading = 3 #Turn left to face East\n if state[2] == 3: new_heading = 0 #Turn left to face North\n elif action == 'TurnRight':\n if state[2] == 0: new_heading = 3 #Turn to face East\n if state[2] == 1: new_heading = 0 #Turn to face South\n if state[2] == 2: new_heading = 1 #Turn to face West\n if state[2] == 3: new_heading = 2 #Turn to face North\n new_state = (new_x,new_y,new_heading)\n return new_state", "def result(self, state, action):\n \"*** YOUR CODE HERE ***\"\n new_x, new_y, new_heading = state\n if action == 'Forward':\n if state[2] == 0: new_y = state[1] + 1 #Forward North\n if state[2] == 1: new_x = state[0] - 1 #Forward West\n if state[2] == 2: new_y = state[1] - 1 #Forward South\n if state[2] == 3: new_x = state[0] + 1 #Forward East \n elif action == 'TurnLeft':\n if state[2] == 0: new_heading = 1 #Turn left to face West\n if state[2] == 1: new_heading = 2 #Turn left to face South \n if state[2] == 2: new_heading = 3 #Turn left to face East\n if state[2] == 3: new_heading = 0 #Turn left to face North\n elif action == 'TurnRight':\n if state[2] == 0: new_heading = 3 #Turn to face East\n if state[2] == 1: new_heading = 0 #Turn to face South\n if state[2] == 2: new_heading = 1 #Turn to face West\n if state[2] == 3: new_heading = 2 #Turn to face North\n new_state = (new_x,new_y,new_heading)\n return new_state", "def updatePaddle(self, touch):\n #first click\n if (touch != None and self._last == None):\n self._clickdist = touch.x - self._paddle.x\n \n #click hold - paddle movement\n if (self._last != None and touch != None):\n self._paddle.x = touch.x - self._clickdist\n \n #prevent paddle from extending past right edge\n if self._paddle.x > (GAME_WIDTH - PADDLE_WIDTH):\n self._paddle.x = GAME_WIDTH - PADDLE_WIDTH\n \n #prevent paddle from extending past left edge\n if self._paddle.x < 0:\n self._paddle.x = 0\n self._last = touch", "def transitionState(self, state, action):\n x, y = state\n dV_x, dV_y = self.convertActionToVelocity(action)\n\n # Updating velocities with probability 1-no_action_prob\n # if the update does not result in both velocities equal to 0\n r = random.uniform(0, 1)\n if r > self.no_action_prob:\n if not (self.Vx + dV_x <= 0 and self.Vy + dV_y <= 0):\n self.Vx = max(0, min(5, self.Vx + dV_x))\n self.Vy = max(0, min(5, self.Vy + dV_y))\n\n new_x = int(x + self.Vx)\n new_y = int(y + self.Vy)\n\n # Check if car has crossed a boundary\n for p, q in self.borders:\n if self.doIntersect(Point(x, y), Point(new_x, new_y), p, q):\n self.Vx = 0\n self.Vy = 0\n return -1, (random.randint(0, L1), 0), False\n\n # Check if car has crossed finish line\n if self.doIntersect(Point(x, y), Point(new_x, new_y), self.finish_p, self.finish_q):\n self.Vx = 0\n self.Vy = 0\n return -1, (random.randint(0, L1), 0), True\n\n return -1, (new_x, new_y), 
False", "def action_handler(self):\n if self.state == data.DEAD:\n return\n\n x = 0\n for check in self.state_chart[self.state]:\n if not check:\n x += 1\n continue\n elif check():\n self.state = x\n\n # Some messages when state changes\n if self.state == data.CHASE:\n self.handler.message_box.add_msg(\"{} sees you!\".format(self.name), \n data.COLOURS['mob_behaviour_text'])\n elif self.state == data.RUN:\n self.handler.message_box.add_msg(\"{} runs away!\".format(self.name), \n data.COLOURS['mob_behaviour_text'])\n\n x += 1\n\n if self.state == data.HOLD:\n return\n elif self.state == data.CHASE:\n self.chase(self.handler.player)\n elif self.state == data.RUN:\n self.run(self.handler.player)", "def update_ball(self):\n\t\tself.ball_x += self.velocity_x\n\t\tself.ball_y += self.velocity_y\n\t\tif self.ball_y < 0:\n\t\t\tself.ball_y = -self.ball_y\n\t\t\tself.velocity_y = -self.velocity_y\n\t\tif self.ball_y > 1:\n\t\t\tself.ball_y = 2 - self.ball_y\n\t\t\tself.velocity_y = -self.velocity_y\n\t\tif self.ball_x < 0:\n\t\t\tself.ball_x = -self.ball_x\n\t\t\tself.velocity_x = -self.velocity_x\n\t\tif self.ball_x < 1:\n\t\t\treturn 0\n\t\tif self.ball_y > self.paddle_y + State.paddle_height or self.ball_y < self.paddle_y:\n\t\t\treturn -1\n\t\tself.ball_x = 2 - self.ball_x\n\t\tself.velocity_x = random.uniform(-0.015, 0.015) - self.velocity_x\n\t\tif abs(self.velocity_x) < 0.03:\n\t\t\tself.velocity_x = 0.03 if self.velocity_x > 0 else -0.03\n\t\tself.velocity_y = random.uniform(-0.03, 0.03) - self.velocity_y\n\t\tself.velocity_x = max(min(self.velocity_x, 1.0), -1.0)\n\t\tself.velocity_y = max(min(self.velocity_y, 1.0), -1.0)\n\t\treturn 1", "def getTransitionStatesAndProbs(self, state, action = None):\n # may move ball\n ball = state[0][:2]\n if action == None:\n ballVelocity = state[0][2:]\n else:\n ballVelocity = (0, 0)\n\n keepers = list(self.getKeepers(state))\n takers = list(self.getTakers(state))\n\n chasers = sorted(keepers, key=lambda keeper: util.getPointVectorDistance(keeper, ball, ballVelocity))\n # most closest agent, possess the ball, or go to the ball \n if self.weHaveBall(state):\n # j has the ball, its transition depends on the action\n if action[0] == 'hold':\n pass\n elif action[0] == 'pass':\n # pass the ball to a teammate\n rand = util.randomVector(0.1)\n target = keepers[action[1]]\n diff = util.getDirection(keepers[0], (target[0] + rand[0], target[1] + rand[1]))\n ballVelocity = (self.ballSpeed * diff[0], self.ballSpeed * diff[1])\n else:\n raise Exception('Unknown action')\n else:\n # j should go to the ball\n chasers[0] = self.moveTowards(chasers[0], ball)\n\n # other agents get open for a pass\n for i in xrange(1, len(chasers)):\n # concretely, this agent goes to a least congested place\n chasers[i] = self.moveTowards(chasers[i], self.getLeastCongestedLoc(state, chasers[i]))\n keepers = sorted(chasers, key=lambda keeper: util.getDistance(keeper, ball))\n \n for i in xrange(2):\n takers[i] = self.moveTowards(takers[i], ball)\n for i in xrange(2, len(takers)):\n takers[i] = self.moveTowards(takers[i], keepers[1])\n takers = sorted(takers, key=lambda taker: util.getDistance(taker, keepers[0]))\n \n newBall = (ball[0] + ballVelocity[0], ball[1] + ballVelocity[1],\\\n ballVelocity[0], ballVelocity[1])\n newState = [newBall] + keepers + takers\n return [(tuple(newState), 1)]", "def doAction(self,state,action):\n self.lastState = state\n self.lastAction = action", "def follow_ball(rel_state):\n\n # Transform rel_state string into separate variables\n\n # Split into 
individual relational mini states\n rel_list = rel_state.split(\" AND \")\n\n # Check whether the ball is present\n ball_present = True if 'b_pre' in rel_list else False\n\n # Check which object is more to the right\n if 'l_x(b,p)' in rel_list:\n right_obj = 'p'\n elif 's_x(b,p)' in rel_list:\n right_obj = 'none'\n elif 'm_x(b,p)' in rel_list:\n right_obj = 'b'\n\n # Check x trajectory of ball\n if 'l_trajx(b2,b1)' in rel_list:\n ball_trajx = 'l'\n elif 's_trajx(b2,b1)' in rel_list:\n ball_trajx = 's'\n elif 'm_trajx(b2,b1)' in rel_list:\n ball_trajx = 'm'\n\n # Implement minimal rule (ignore paddle_traj x and y completely)\n if ball_present:\n if right_obj == 'b':\n if ball_trajx == 'l':\n action = 0\n elif ball_trajx == 's':\n action = 2\n elif ball_trajx == 'm':\n action = 2\n elif right_obj == 'none':\n if ball_trajx == 'l':\n action = 3\n elif ball_trajx == 's':\n action = 0\n elif ball_trajx == 'm':\n action = 2\n elif right_obj == 'p':\n if ball_trajx == 'l':\n action = 3\n elif ball_trajx == 's':\n action = 3\n elif ball_trajx == 'm':\n action = 0\n else:\n action = np.random.choice(range(4))\n\n return action", "def bump_moving_obstacle(\n state: State, action: Action, next_state: State\n) -> bool:\n return overlap(state, action, next_state, object_type=MovingObstacle)", "def move(self, action):\n self.time += 1\n\n # If ship is destroyed ship can only contemplate sadness and despair\n if not action or not self.is_playable():\n return None\n\n self.actualise = False\n\n if self.leroy_time == 1:\n self.back_to_normal()\n if self.leroy_time > 0:\n self.leroy_time -= 1\n\n # there is a chance that the ia enter in leroy mode\n # the ia goes mad for some time, acting randomly\n # added to allow the ships to explore the possible actions and not stay passive\n if not self.player and self.leroy_time == 0 and self.agent.behavior == \"network\" and random() < LEROY_RATE:\n self.leroy_jenkins()\n\n # training reward depending on position\n # self.agent.reward = self.go_bottom_reward()\n\n if isinstance(action, ActionOneHot):\n if action.pointing:\n self.pointing = Point(randint(0, DEFAULT_WIDTH-1), randint(0, DEFAULT_HEIGHT-1))\n elif isinstance(action, Action):\n if action.pointing:\n self.pointing = action.pointing\n # print(\"action.pointing\", action.pointing)\n # print(\"turn \", self.direction)\n\n if action.thrust:\n self.thrust()\n if action.shoot:\n self.shoot()", "def moveBall(self):\n \n #move ball one step\n vx = self._ball.get_vx()\n vy = self._ball.get_vy()\n self._ball.x = self._ball.x + vx\n self._ball.y = self._ball.y + vy\n \n #COLLISIONS\n if vy > 0:\n balltop = self._ball.y + BALL_DIAMETER\n if balltop >= GAME_HEIGHT:\n self._ball.set_vy(-vy)\n if (self._getCollidingObject() != None and\n self._getCollidingObject() != self._paddle):\n self._ball.set_vy(-vy)\n self._wall.removebrick(self._getCollidingObject())\n if vy < 0:\n ballbottom = self._ball.y\n if ballbottom <= 0:\n self._lostlife = True\n if self._getCollidingObject() == self._paddle:\n self._ball.set_vy(-vy)\n if (self._getCollidingObject() != None and\n self._getCollidingObject() != self._paddle):\n self._ball.set_vy(-vy)\n self._wall.removebrick(self._getCollidingObject())\n if vx > 0:\n ballright = self._ball.x + BALL_DIAMETER\n if ballright >= GAME_WIDTH:\n self._ball.set_vx(-vx)\n if vx < 0:\n ballleft = self._ball.x\n if ballleft <= 0:\n self._ball.set_vx(-vx)", "def _activate(self):\n # Increase the speed of the ball(s) slightly now the player has the\n # advantage of the laser.\n 
self.game.paddle.transition(LaserState(self.game.paddle, self.game))\n for ball in self.game.balls:\n ball.base_speed += 1", "def apply_action(self, action):\n x, y = action\n e_x, e_y = self._empty\n\n # check that the tile to move and the empty tile are neighbors\n if (math.fabs(x - e_x) == 1) ^ (math.fabs(y - e_y) == 1):\n # swap them\n self._tiles[y][x], self._tiles[e_y][e_x] = None, self._tiles[y][x]\n self._empty = x, y # empty tile has moved; store new location\n else:\n raise ValueError(\"Invalid move\")", "def next_state(self, action):\n self.state = self.states[action][self.state]", "def deactivate(self):\n self.game.paddle.transition(NormalState(self.game.paddle))\n for ball in self.game.balls:\n ball.base_speed -= 1", "def deactivate(self):\n self.game.paddle.transition(NormalState(self.game.paddle))\n for ball in self.game.balls:\n ball.base_speed -= 1", "def move_inward_outward(self):\r\n\r\n if self.movement == \"inward_outward\" and self.flag_move:\r\n leftPos, topPos, rightPos, bottomPos = self.canvas.coords(self.ball)\r\n if self.size_flag:\r\n self.change_size(\"larger\")\r\n elif not self.size_flag:\r\n self.change_size(\"smaller\")\r\n # If the ball hits a wall, change inward to outward.\r\n if leftPos <= 0 or rightPos >= 400 or topPos <= 0 or bottomPos >= 400:\r\n self.size_flag = 0\r\n # If the ball size reaches 1, change outward to inward.\r\n elif self.size == 1:\r\n self.size_flag = 1\r\n self.canvas.after(50, self.move_inward_outward)", "def _catch(self, ball):\n # Work out the position of the ball relative to the paddle.\n pos = (ball.rect.bottomleft[0] - self.game.paddle.rect.topleft[0],\n -ball.rect.height)\n ball.anchor(self.game.paddle, pos)", "def update(self, ai_settings):\n if self.moving_up and self.rect.top > 0:\n self.rect.centery -= ai_settings.paddle_speed_factor\n\n if self.moving_down and self.rect.bottom < self.screen_rect.bottom:\n self.rect.centery += ai_settings.paddle_speed_factor\n\n if self.moving_left and self.rect.left > 0:\n self.rect.centerx -= ai_settings.paddle_speed_factor\n\n if self.moving_right and self.rect.right < self.screen_rect.centerx:\n self.rect.centerx += ai_settings.paddle_speed_factor", "def update(self, action):\r\n p_reward = 0\r\n\r\n if self.player_Type == 'Human':\r\n y_, self.vy = action\r\n\r\n elif self.player_Type == 'Agent':\r\n y_ = self.y + self.action_Space[action]\r\n if y_ < 0:\r\n y_ = 0\r\n p_reward -= 10\r\n elif y_ > self.screen_Height - self.Height:\r\n y_ = self.screen_Height - self.Height\r\n p_reward -= 10\r\n\r\n self.vy = abs(self.y - y_)\r\n\r\n self.y = y_\r\n \r\n return p_reward", "def reset_ball(self, paddle_1, paddle_2):\r\n\r\n # Reset position and select new speeds\r\n self.x = 100\r\n self.y = np.random.randint(1, self.screen_Height-1)\r\n\r\n self.vx = np.random.randint(25, 30)\r\n self.vy = np.random.choice([-1, 1]) * np.random.randint(25, 30)\r\n\r\n\r\n p1_state, p2_state = self.state_observation(paddle_1, paddle_2)\r\n\r\n return p1_state, p2_state", "def play_step(self, action):\n self.players[0].moving_left = False\n self.players[0].moving_right = False\n if action == MOVE_LEFT:\n self.players[0].moving_left = True\n for i in range(LOOP_AT_EACH_MOVE_UPDATE):\n self.update(is_a_star=True)\n if self.dead_player or not self.players[0].is_alive:\n break\n self.players[0].moving_left = False\n if self.dead_player or not self.players[0].is_alive:\n return\n elif action == MOVE_RIGHT:\n self.players[0].moving_right = True\n for i in range(LOOP_AT_EACH_MOVE_UPDATE):\n 
self.update(is_a_star=True)\n if self.dead_player or not self.players[0].is_alive:\n break\n self.players[0].moving_right = False\n if self.dead_player or not self.players[0].is_alive:\n return\n elif action == SHOOT:\n if self.dead_player or not self.players[0].is_alive:\n self.update(is_a_star=True)\n return\n if not self.players[0].weapon.is_active:\n self.players[0].shoot()\n for i in range(LOOP_AT_EACH_MOVE_UPDATE):\n self.update(is_a_star=True)\n if self.dead_player or not self.players[0].is_alive:\n break\n if self.dead_player or not self.players[0].is_alive:\n return", "def move_bolt_up(self):\n self.y += self._velocity", "def update(self, opponent_action, player_action):\n\n if self.colour == 'upper':\n self.game_in_head.update(player_action, opponent_action)\n else:\n self.game_in_head.update(opponent_action, player_action)", "def step(self):\n\n self.ball_x = self.ball_x + self.vel_x\n self.ball_y = self.ball_y + self.vel_y\n if self.ball_y >= 480:\n self.vel_y *= -1\n elif self.ball_y <= 0:\n self.vel_y *= -1\n if self.ball_x >= 640:\n self.vel_x *= -1\n elif self.ball_x <= 0:\n self.vel_x *= -1", "def move(self, action):\n ligne = self.location_[0] + self.actions_[action][0]\n column = self.location_[1] + self.actions_[action][1]\n newLocation = (ligne, column)\n self.location_ = newLocation\n newState = (self.location_[0] * self.width ) + self.location_[1]\n\n if self.location_[0] == 0 and self.location_[0] == 0:\n return 0\n\n return newState", "def makemove(self,boxId):\n self.gameState[boxId] = self.currentplayer\n self.currentplayer = self.currentplayer * -1\n self.board.updateBoardState(self.gameState)", "def __process_input(self, input_):\n if self.state.game_over:\n if input_.key_pressed:\n self.state.exit = True\n else:\n if input_.action == 'PLAYER_UP':\n self.state.player.direction = 'U'\n elif input_.action == 'PLAYER_DOWN':\n self.state.player.direction = 'D'\n elif input_.action == 'PLAYER_LEFT':\n self.state.player.direction = 'L'\n elif input_.action == 'PLAYER_RIGHT':\n self.state.player.direction = 'R'", "def move_down(self):\n #if user moves paddle right below on the screen, they won't be able to move it more downwards by using this if statement\n #SCREEN_HEIGHT - 280 = Exact number of pixels where paddle can stop exactly on bottom edge but still has its body fully shown\n if self.center.y > SCREEN_HEIGHT - 280:\n self.center.y -= MOVE_AMOUNT", "def move(self, state_prev, state, reward, selected):\n\n if state:\n if self.team_id == 1: # Set correct teams based on team id\n self_team = state['team1']\n other_team = state['team2']\n else:\n self_team = state['team2']\n other_team = state['team1']\n\n if state:\n if self.id == 0: # Special for the goal-keeper\n ai_gk_pass = self.gk_pass(\n other_team['players'], self_team['goal_x'])\n ai_gk_move = self.gk_move(self_team['goal_x'], state['ball'])\n # GK has the ball\n if selected == self.id and state['ball'].ball_stats['player'] == self.id:\n if ai_gk_pass != 'NOTHING':\n return ai_gk_pass\n else:\n return ai_gk_move\n else:\n return ai_gk_move\n\n # Selected player has the ball\n if selected == self.id and state['ball'].ball_stats['player'] == self.id:\n ai_shoot = self.ai_shoot(\n other_team['players'][0], other_team['goal_x'])\n ai_pass = self.ai_pass(\n self_team['players'], other_team['players'])\n # If shot is possible, take it\n if self.pos.dist(P(other_team['goal_x'], H//2)) <= AI_SHOOT_RADIUS and ai_shoot != 'NOTHING':\n return ai_shoot\n # Else, pass if possible (passes towards the enemy goal are 
prioritized)\n elif ai_pass != 'NOTHING' and random.random() >= AI_PASS_PROB:\n return ai_pass\n else:\n # Move towards the goal\n return self.ai_move_with_ball(other_team['players'], other_team['goal_x'])\n\n else: # Move towards the ball if posssbile, otherwise return to formation\n move = self.ai_move_without_ball(state['ball'])\n if move != 'NOTHING':\n return move\n else:\n return 'FORM' # Special action, not defined in ACT\n else:\n return 'NOTHING' # Otherwise do nothing", "def _activate(self):\n self.game.paddle.ball_collide_callbacks.append(self._catch)\n\n # Monitor for spacebar presses to release a caught ball.\n receiver.register_handler(pygame.KEYUP, self._release_ball)", "def update_action(self, status, x, y, dist_in):\n print(\"CURRENT STATE:\", self.status)\n print(\"DISTANCE IN IN: \", dist_in)\n\n # Startup state\n if status == 'STARTUP':\n # Determine the states needed for our path\n self.calculate_path()\n\n # Wait for user input to start\n raw_input('Press Enter to continue...')\n self.next_state()\n\n # Drive forward state\n elif status == 'FORWARD':\n # If the current apriltag in view is either the smallbot's right or left tag\n # and the current apriltag's x position is within the camera bounds\n if (x < Constants.MAX_CAM_X_BOUND and x > Constants.MIN_CAM_X_BOUND) and \\\n (self.current_tag == self.left_tag or self.current_tag == self.right_tag):\n self.current_action = \"drive\"\n else:\n # Reset current action for 1 iteration to avoid data overlap\n self.current_action = 'none'\n print(\"DONE DRIVING STRAIGHT---------------------------------\")\n self.next_state()\n\n # Drive backwards state\n elif status == 'BACKWARDS':\n # If the current apriltag in view is either the smallbot's right or left tag\n # and as long as the apriltag's x position is less than or equal to the max camera bound\n if (x <= Constants.MAX_CAM_X_BOUND) and \\\n (self.current_tag == self.left_tag or self.current_tag == self.right_tag):\n self.current_action = \"drivebackwards\"\n else:\n self.next_state()\n\n # Turn right state\n elif status == 'TURN_RIGHT':\n # Keep turning right while the smallbot is not done achieving its goal angle aka 90 deg\n if self.is_done_turning() != 'done_turning':\n self.current_action = 'turnright'\n else:\n # Reset current action for 1 iteration to avoid data overlap\n self.current_action = 'none'\n # Capture the current apriltag's distance from the camera after turn\n self.dist_after_turn = dist_in\n print(\"CAPTURED DIST: \", self.dist_after_turn)\n self.next_state()\n\n # Turn left state\n elif status == 'TURN_LEFT':\n print(\"INSIDE TURN LEFT\")\n # Keep turning left while the smallbot is not done achieving its goal angle aka -90 deg\n if self.is_done_turning() != 'done_turning':\n self.current_action = 'turnleft'\n else:\n # Reset current action for 1 iteration to avoid data overlap\n self.current_action = 'none'\n # Capture the current apriltag's distance from the camera after turn\n self.dist_after_turn = dist_in\n print(\"CAPTURED DIST: \", self.dist_after_turn)\n self.next_state()\n\n # Creep forward state\n elif status == 'CREEP_FORWARD':\n print(\"current Y VAL AT Y: \", y)\n # If it sees the back apriltag then keep going straight for the defined TRAVEL_DIST\n if (dist_in < self.dist_after_turn + Constants.FWD_TRAVEL_DIST) and self.current_tag == self.back_tag:\n self.current_action = \"drive\"\n print(\"INSIDE IF STATMT----CURRENT Y VAL: \", y)\n else:\n print(\"----------GONE TO NEXT STATE----------\")\n self.next_state()\n\n # Creep backwards 
state\n elif status == 'CREEP_BACKWARD':\n print(\"current Y VAL AT Y: \", y)\n # If it sees the back apriltag then keep going backwards for the defined\n # TRAVEL_DIST times the number of times it creeped forward\n if (dist_in > self.dist_after_turn - (\n Constants.FWD_TRAVEL_DIST * self.times_driven_forward)) and self.current_tag == self.back_tag:\n self.current_action = \"drivebackwards\"\n print(\"INSIDE IF STATMT----CURRENT Y VAL: \", y)\n else:\n print(\"----------GONE TO NEXT STATE----------\")\n self.next_state()\n\n # Halt state\n elif status == 'HALT':\n # First stop\n self.current_action = 'stop'\n # Then go to next state\n self.next_state()\n\n # Dump state\n elif status == 'DUMP':\n self.current_action = 'dump'\n self.next_state()\n\n # Stop state\n elif status == 'STOP':\n self.current_action = 'stop'\n\n # Default state\n else:\n self.current_action = 'none'", "def apply_action(self, physics, action, random_state):\n del random_state\n physics.bind(self.actuators).ctrl = action", "def update_to_state(self, game_state):\n pass", "def doAction(self, state, action):\n self.lastState = state\n self.lastAction = action", "def take_action(self, state):", "def move(self, agent, action):\n\t\tpass", "def state_cb(self, msg):\n self.prev_state = deepcopy(self.current_state)\n self.current_state = msg\n\n if self.current_state.mode == \"MANUAL\":\n if self.offboard_point_streaming:\n rospy.loginfo(\"Setpoint stream DISABLED\")\n self.stop_streaming_offboard_points()\n\n if self.current_state.mode == \"POSCTL\":\n if not self.offboard_point_streaming:\n rospy.loginfo(\"Setpoint stream ENABLED\")\n self.start_streaming_offboard_points()\n if not self.prev_state.mode == \"POSCTL\":\n # just switched into POSCTL, call hover\n self.hover()\n\n if self.current_state.mode == \"OFFBOARD\":\n if not self.prev_state.mode == \"OFFBOARD\":\n # just switched to OFFBOARD, call move\n rospy.loginfo(\"Entering OFFBOARD Mode\")\n for i in range(0,len(velocities)):\n maneuver_velocity_setpoint=velocities[i]\n maneuver_reference_frame = maneuver_reference_Frame\n maneuver_duration=duration[i]\n self.execute_maneuver( self.maneuver_velocity_setpoint, \n self.maneuver_reference_frame, \n self.maneuver_duration)", "def mark(state, opp, distMar):\n vect = (state.ball_pos - opp.position).normalize()\n vect.norm = distMar\n return goTo(state, opp.position + vect)", "def step(self, action):\n # TODO: code here\n y, x = self.state\n dy, dx = self.moves[action]\n next_x, next_y = x+dx, y+dy\n\n next_x = np.clip(next_x, 0, self.width-1) # clip the values to the world\n next_y = np.clip(next_y, 0, self.height-1) # clip the values to the world\n\n if next_y == 1:\n rand = np.random.uniform()\n if rand < 0.2:\n next_x += 1\n elif rand < 0.7:\n next_x += 2\n else:\n next_x += 3\n\n next_x = np.clip(next_x, 0, self.width - 1)\n\n if next_x == 4 and next_y == 1:\n reward = -1\n done = True\n elif next_x == 4 and next_y == 2:\n reward = 1\n done = True\n else:\n reward = 0\n done = False\n\n next_state = (next_y, next_x)\n self.state = next_state\n\n return next_state, reward, done, {}", "def __step(self, p):\n action = self.__action(p)\n temp_state = self.state\n\n if self.state == 0:\n if action == 1:\n self.state += 1\n elif self.state == 1:\n if action == 1:\n self.state -= 1\n else:\n self.state += 1\n else:\n if action == 1:\n self.state += 1\n else:\n self.state -= 1\n \n self.trajectory.append([temp_state, action, self.__reward(self.state)])", "def _next_state(self, state, action):\n\n # Transition table to 
define movement for each action\n if self.action_type == 'VonNeumann':\n transitions = {0: [-1, 0], 1: [+1, 0], 2: [0, -1], 3: [0, +1]}\n elif self.action_type == 'Moore':\n transitions = {0: [-1, 0], 1: [+1, 0], 2: [0, -1], 3: [0, +1],\n 4: [-1, +1], 5: [+1, +1], 6: [-1, -1], 7: [+1, -1]}\n\n new_state = [state[0] + transitions[action][0], state[1] + transitions[action][1]]\n if self.maze[new_state[0]][new_state[1]] == 1: # Hit wall, stay there\n return state\n else: # Valid move for 0, 2, 3, 4\n return new_state", "def move(self):\n if self.ycor() > 280: self.y_dir = -1 # Set vertical movement to down if ball at top of screen\n if self.xcor() > 380: self.x_dir = -1 # Set horizontal movement to left if ball at right of screen\n if self.xcor() < -380: self.x_dir = 1 # Set horizontal movement to right if ball at left of screen\n new_x = self.xcor() + self.x_dir * 2 # Define 2 spaces forward in set horizontal dir of travel\n new_y = self.ycor() + self.y_dir * 2 # Define 2 spaces forward in set vertical dir of travel\n self.goto(new_x, new_y) # Move ball to newly defined position", "def update(self):\r\n\r\n # Two states, either target is shot or its moving\r\n if self.state == 'shot':\r\n self.hold -= 1\r\n self.z -= 0.03\r\n self.y -= 5\r\n if self.z < 0:\r\n self.z = 0\r\n self.opacity -= 4 if self.opacity > 0 else 0\r\n self.sprite.set_position(self.x, self.y)\r\n self.sprite.update(scale = self.z, rotation = int((self.y % 356)))\r\n self.sprite.opacity = self.opacity\r\n if self.hold <= 0:\r\n self.sprite.delete()\r\n elif self.state == 'moving':\r\n self.x += (self.speed * self.x_direction)\r\n self.y += (self.speed * self.y_direction)\r\n\r\n self.sprite.set_position(self.x, self.y)\r\n\r\n\r\n if int(t.time()) - self.last_change_x_time > 1:\r\n if random.random() <= self.prob_change_x:\r\n self.x_direction *= -1\r\n self.last_change_x_time = int(t.time())\r\n\r\n if int(t.time()) - self.last_change_y_time > 1:\r\n if random.random() <= self.prob_change_y:\r\n self.y_direction *= -1\r\n self.last_change_y_time = int(t.time())\r\n\r\n if self.x < self.anchor_x:\r\n print(f'set to {self.anchor_x}')\r\n self.x = self.anchor_x\r\n self.x_direction *= -1\r\n if (self.x + self.anchor_x) > (config.window_width):\r\n self.x = (config.window_width-self.anchor_x)\r\n self.x_direction *= -1\r\n\r\n if self.y < self.anchor_y:\r\n self.y = self.anchor_y\r\n self.y_direction *= -1\r\n if (self.y + self.anchor_y) > (config.window_height):\r\n self.y = (config.window_height-self.anchor_y)\r\n self.y_direction *= -1\r\n\r\n if self.x_direction == 1:\r\n self.sprite.update(rotation = 2*self.speed)\r\n elif self.x_direction == -1:\r\n self.sprite.update(rotation = -(2*self.speed))", "def nextState(self, state, action):\n return state + action", "def act(self, state):\n return", "def sample_action(self, state):\n # YOUR CODE HERE\n if state[0] == 20 or state[0] == 21: #Now we should stick (0)\n action = 0\n else: # Otherwise hit\n action = 1\n \n return action", "def movement(self, action):\r\n\r\n #if its moving horizontally only can move vertically in the next move\r\n if self.velocities[1] == 0:\r\n if action == 0 :\r\n self.velocities[0] = 0\r\n self.velocities[1] = -1\r\n if action == 1 :\r\n self.velocities[0] = 0\r\n self.velocities[1] = 1\r\n\r\n #if its moving vertically only can move horizontally in the next move\r\n if self.velocities[0] == 0:\r\n if action == 2 :\r\n self.velocities[0] = -1\r\n self.velocities[1] = 0\r\n if action == 3 :\r\n self.velocities[0] = 1\r\n self.velocities[1] = 
0\r\n \r\n self.displacement()", "def update(self, paddle_1, paddle_2):\r\n done = False\r\n \r\n p1_reward = 0\r\n p2_reward = 0\r\n\r\n # Move ball and move to edges if past boundary\r\n x_ = self.x + self.vx\r\n y_ = self.y + self.vy\r\n\r\n if x_ < self.left_x:\r\n x_ = self.left_x\r\n elif x_ > self.right_x:\r\n x_ = self.right_x\r\n\r\n if y_ < self.top_y:\r\n y_ = self.top_y\r\n elif y_ > self.bot_y:\r\n y_ = self.bot_y\r\n\r\n\r\n # Contact with top or bottom\r\n if y_ == self.top_y or y_ == self.bot_y:\r\n self.vy *= -1\r\n\r\n\r\n # Left side\r\n if x_ == self.left_x:\r\n if paddle_1.y <= y_ <= (paddle_1.y + paddle_1.Height):\r\n x_ += self.Radius\r\n change = abs(paddle_1.vy//8)\r\n self.vx = -1*self.vx + change//2\r\n if self.vy < 0:\r\n self.vy -= change\r\n else:\r\n self.vy += change\r\n\r\n\r\n self.rallies += 1\r\n\r\n p1_reward += 100\r\n p2_reward -= 0\r\n else:\r\n p1_reward -= 100\r\n p2_reward += 0\r\n done = True\r\n\r\n\r\n # Right side\r\n elif x_ == self.right_x:\r\n if paddle_2.y <= y_ <= (paddle_2.y + paddle_2.Height):\r\n x_ -= self.Radius\r\n change = abs(paddle_2.vy//8)\r\n self.vx = -1*self.vx - change//2\r\n if self.vy < 0:\r\n self.vy -= change\r\n else:\r\n self.vy += change\r\n\r\n self.rallies += 1\r\n\r\n p1_reward -= 0\r\n p2_reward += 100\r\n else:\r\n p1_reward += 0\r\n p2_reward -= 100\r\n done = True\r\n\r\n\r\n\r\n # Update ball position and velocity if exceeded\r\n if not done:\r\n self.x = x_\r\n self.y = y_\r\n\r\n if self.vx > self.V_max:\r\n self.vx = self.V_max\r\n elif self.vx < -self.V_max:\r\n self.vx = -self.V_max\r\n \r\n if self.vy > self.V_max:\r\n self.vy = self.V_max\r\n elif self.vy < -self.V_max:\r\n self.vy = -self.V_max\r\n\r\n\r\n p1_state, p2_state = self.state_observation(paddle_1, paddle_2)\r\n\r\n return p1_state, p2_state, p1_reward, p2_reward, done", "def _take_action(self, action):\n\n if isinstance(action, list) or isinstance(action, np.ndarray):\n action = action[0]\n\n if self.continuous:\n increment = np.array([1.5*np.cos(action),1.5*np.sin(action)])\n else:\n increment = np.array([0.0,0.0])\n if action == 0:\n increment[0] = 1.5\n elif action == 1:\n increment[0] = 1.225\n increment[1] = 1.225\n elif action == 2:\n increment[1] = 1.5\n elif action == 3:\n increment[0] = -1.225\n increment[1] = 1.225\n elif action == 4:\n increment[0] = -1.5\n elif action == 5:\n increment[0] = -1.225\n increment[1] = -1.225\n elif action == 6:\n increment[1] = -1.5\n elif action == 7:\n increment[0] = 1.225\n increment[1] = -1.225\n else:\n print('NOP!')\n\n self.dog_pose += increment\n self._update_environment()", "def sample(self, state, action):\n in_target=False\n if action not in self.available(state):\n return None\n # N = len(self.post(state, action))\n prob = []\n for t in self.post(state, action):\n prob.append(self.prob_delta(state, action, t))\n\n rand_val = random.random()\n total = 0\n for key in self.post(state,action):\n total +=self.prob_delta(state,action,key)\n\n if rand_val <= total:\n\n next_state=key\n break\n (x,y,t)=state\n ballpos = (-200, 0)\n if (abs(x) > 1000 or abs(y) > 1000) or (abs(y) <= 400 and x <= 0) or (t < 115 or t > 245):\n in_target=True\n\n\n if x==0 and y==0 and t==180:\n\n in_target=True\n\n\n # next_state = self.post(state, action)[np.random.choice(range(len(self.post(state, action))),1,prob)[0]]\n # Note that only one element is chosen from the array, which is the\n # output by random.choice\n return next_state,in_target", "def move(o, action):\n # if action not in Act: raise...?\n { 
Act.Down : lambda: o.applyGravity(),\n Act.Left : lambda: o._tryShift(o.block,Point(-1,0)),\n Act.Right : lambda: o._tryShift(o.block,Point( 1,0)),\n Act.Drop : lambda: o._setBlock(o.shadowBlock),\n Act.Hold : lambda: o._Hold(),\n Act.RotCW : lambda: o._Rotate(clockwise),\n Act.RotCCW: lambda: o._Rotate(counterClockwise),\n }[action]()", "def jump(self):\r\n if self.grounded == True:\r\n self.vel.y = -13", "def handle_pygame_event(self, event):\n if event.type != KEYDOWN:\n # nothing to do\n return\n if event.key == pygame.K_LEFT:\n self.model.change_paddle_velocity(-1)\n elif event.key == pygame.K_RIGHT:\n self.model.change_paddle_velocity(1)", "def take_action(self, action):\r\n\r\n self._update_velocity(action)\r\n self._update_position()\r\n if self.is_terminal_state():\r\n return 100.0\r\n\r\n return -1.0", "def execute(self, action):\n if isinstance(action, top.StateChangeAction):\n if self._plumb is not None:\n self._plumb.set_component_state(action.component, action.state)", "def updatePaddle(self, Input):\n assert isinstance(Input,GInput)\n change = 0\n if Input.is_key_down('left'):\n if self._paddle.getX() >= (self._paddle.getWidth()/2):\n change -= self._paddle.getSpeed()\n if Input.is_key_down('right'):\n if self._paddle.getX() <= (GAME_WIDTH - (self._paddle.getWidth()/2)):\n change += self._paddle.getSpeed()\n self._paddle.setX(self._paddle.getX() + change)", "def transition_function(state, action):\n results = []\n\n if action.action_type == Action.NOOP:\n results.append((state, 1.0))\n\n elif action.action_type == Action.GRASP:\n # point distribution for success mixed with point distribution for failure\n alpha = 0.8\n\n # success - gripper moves to object position and holds object\n success_state = deepcopy(state)\n obj = utils.get_object(success_state, action.name)\n if obj is None:\n alpha = 0\n else:\n gripper = utils.get_object(success_state, 'gripper')\n if obj.__class__ == Drawer:\n gripper.x = obj.x + (obj.width - 1)/2 + 1\n gripper.y = obj.y\n gripper.z = 2\n else:\n gripper.x = obj.x\n gripper.y = obj.y\n gripper.z = obj.z\n gripper.holding = obj.name\n gripper.closed = True\n\n results.append((success_state, alpha*1.0))\n\n # failure - no change\n results.append((state, (1 - alpha)*1.0))\n\n elif action.action_type == Action.PLACE:\n gripper = utils.get_object(state, 'gripper')\n new_z = utils.ray_trace(action.position.x, action.position.y)\n\n # point distribution for success mixed with point distribution for failure\n alpha = 0.8\n\n # success - gripper moves to place position and releases object\n success_state = deepcopy(state)\n obj = utils.gripper_object(success_state)\n gripper_move = utils.get_object(success_state, 'gripper')\n if obj is not None and obj.__class__ == Drawer:\n alpha = 0\n else:\n if obj is not None:\n if obj.__class__ == Container:\n obj.x = action.position.x + obj.x - gripper_move.x\n obj.y = action.position.y + obj.y - gripper_move.y\n else:\n obj.x = action.position.x\n obj.y = action.position.y\n obj.z = new_z\n gripper_move.x = action.position.x\n gripper_move.y = action.position.y\n gripper_move.z = new_z\n gripper_move.closed = False\n gripper_move.holding = ''\n results.append((success_state, alpha*1.0))\n\n # failure - no change\n results.append((state, (1 - alpha)*1.0))\n\n elif action.action_type == Action.OPEN_GRIPPER:\n gripper = utils.get_object(state, 'gripper')\n if not gripper.closed:\n results.append((state, 1.0))\n else:\n success_state = deepcopy(state)\n gripper = utils.get_object(state, 'gripper')\n gripper.closed = 
False\n obj = utils.gripper_object(success_state)\n if obj is None:\n results.append((success_state, 1.0))\n else:\n states = [success_state]\n probs = [1.0]\n prob_sum = 0\n decay = 1.0\n for z in range(obj.z - 1, -1, -1):\n decay *= 0.8\n if obj.__class__ == Item:\n for i in range(obj.z - z, obj.z + z + 1):\n for j in range(obj.z - z, obj.z + z + 1):\n states.append(utils.copy_state_move_object(success_state, obj.unique_name, i, j, z - obj.z))\n p = 1.0/(pow(2*(obj.z - z) + 1, 2))\n p *= decay\n probs.append(p)\n prob_sum += p\n elif obj.__class__ == Container:\n for i in range(int((obj.z - z)/2), int((obj.z + z)/2) + 1):\n for j in range(int((obj.z - z)/2), int((obj.z + z)/2) + 1):\n states.append(utils.copy_state_move_object(success_state, obj.unique_name, i, j, z - obj.z))\n p = 1.0/(pow(2*(int((obj.z - z)/2)) + 1, 2))\n p *= decay\n probs.append(p)\n prob_sum += p\n elif obj.__class__ == Lid:\n states.append(utils.copy_state_move_object(success_state, obj, 0, 0, z - obj.z))\n probs.append(decay)\n for i in range(len(probs)):\n probs[i] /= prob_sum\n results.extend(zip(states, probs))\n\n elif action.action_type == Action.CLOSE_GRIPPER:\n gripper = utils.get_object(state, 'gripper')\n if gripper.closed:\n results.append((state, 1.0))\n else:\n success_state = deepcopy(state)\n gripper = utils.get_object(state, 'gripper')\n gripper.closed = True\n if 'gripper_on_apple' and 'gripper_level_with_apple':\n gripper.holding = 'apple'\n results.append((success_state, 1.0))\n elif 'gripper_on_batteries' and 'gripper_level_with_batteries':\n gripper.holding = 'batteries'\n results.append((success_state, 1.0))\n elif 'gripper_on_flashlight' and 'gripper_level_with_flashlight':\n gripper.holding = 'flashlight'\n results.append((success_state, 1.0))\n elif 'gripper_on_granola' and 'gripper_level_with_granola':\n gripper.holding = 'granola'\n results.append((success_state, 1.0))\n elif 'gripper_on_knife' and 'gripper_level_with_knife':\n gripper.holding = 'knife'\n results.append((success_state, 1.0))\n elif 'gripper_on_small' and 'gripper_level_with_small':\n gripper.holding = 'small'\n results.append((success_state, 1.0))\n elif 'gripper_on_lid' and 'gripper_level_with_lid':\n failure_state = deepcopy(success_state)\n gripper.holding = 'lid'\n results.append((success_state, 0.1))\n results.append((failure_state, 0.9))\n elif 'gripper_touching_drawer' and 'gripper_right_of_drawer' and 'gripper_level_with_drawer':\n failure_state = deepcopy(success_state)\n gripper.holding = 'drawer'\n results.append((success_state, 0.2))\n results.append((failure_state, 0.8))\n elif 'gripper_on_large' and 'gripper_level_with_large':\n failure_state = deepcopy(success_state)\n gripper.holding = 'large'\n results.append((success_state, 0.875))\n results.append((failure_state, 0.125))\n\n elif action.action_type == Action.MOVE_ARM:\n pass\n\n elif action.action_type == Action.RAISE_ARM:\n alpha = 1.0\n gripper = utils.get_object(state, 'gripper')\n if 'gripper_on_lid' in state.relations and 'gripper_below_lid' in state.relations:\n alpha *= 0.8\n if 'gripper_on_drawer' in state.relations and 'gripper_below_drawer' in state.relations:\n alpha *= 0.8\n if 'gripper_on_stack' in state.relations and 'gripper_below_stack' in state.relations:\n alpha *= 0.8\n if 'gripper_on_small' in state.relations and 'gripper_below_small' in state.relations:\n alpha *= 0.8\n if 'gripper_on_large' in state.relations and 'gripper_below_large' in state.relations:\n alpha *= 0.8\n if gripper.holding in ['lid', 'small', 'large']:\n alpha *= 
0.8\n success_state = deepcopy(state)\n gripper = utils.get_object(success_state, 'gripper')\n gripper.z += 1\n if gripper.z > 4:\n gripper.z = 4\n obj = utils.gripper_object(success_state)\n obj.z += 1\n if obj.z > 4:\n obj.z = 4\n results.append((success_state, alpha*1.0))\n\n # failure - no change\n results.append((state, (1 - alpha)*1.0))\n\n elif action.action_type == Action.LOWER_ARM:\n alpha = 1.0\n if 'gripper_on_lid' in state.relations and 'gripper_level_with_lid' in state.relations \\\n or 'gripper_on_small' in state.relations and 'gripper_level_with_small' in state.relations \\\n or 'gripper_on_large' in state.relations and 'gripper_level_with_large' in state.relations:\n alpha = 0\n else:\n gripper = utils.get_object(state, 'gripper')\n if 'gripper_on_drawer' in state.relations and 'gripper_above_drawer' in state.relations:\n alpha *= 0.8\n if 'gripper_on_stack' in state.relations and 'gripper_above_stack' in state.relations:\n alpha *= 0.8\n if gripper.holding in ['lid', 'small', 'large']:\n alpha *= 0.8\n success_state = deepcopy(state)\n gripper = utils.get_object(success_state, 'gripper')\n gripper.z -= 1\n if gripper.z < 0:\n gripper.z = 0\n obj = utils.gripper_object(success_state)\n obj.z -= 1\n if obj.z < 0:\n obj.z = 0\n results.append((success_state, alpha*1.0))\n\n # failure - no change\n results.append((state, (1 - alpha)*1.0))\n\n elif action.action_type == Action.RESET_ARM:\n # point distribution for success mixed with point distribution for failure\n alpha = 0.8\n\n # success - gripper moves to object position and holds object\n success_state = deepcopy(state)\n gripper = utils.get_object(success_state, 'gripper')\n gripper.x = 8\n gripper.y = 1\n gripper.z = 2\n\n results.append((success_state, alpha*1.0))\n\n # failure - no change\n results.append((state, (1 - alpha)*1.0))\n\n return results", "def execute(self, cast):\r\n \r\n paddles = cast[\"paddle\"]\r\n bricks = cast[\"brick\"]\r\n ball = cast[\"ball\"][0]\r\n score = cast[\"score\"][0]\r\n \r\n # breaks the bricks the ball runs into\r\n for brick in bricks:\r\n if ball.get_position().equals(brick.get_position()):\r\n ball.get_velocity().invert_y()\r\n bricks.remove(brick)\r\n # updates the score when a brick is removed\r\n score.updateScore()\r\n break \r\n # bounces off the paddle\r\n for paddle in paddles:\r\n if ball.get_position().equals(paddle.get_position()):\r\n ball.get_velocity().invert_y()\r\n break\r\n # change direction of the ball off right wall \r\n if ball.get_position().get_x() > MAX_X - 2:\r\n ball.get_velocity().invert_x()\r\n # change direction of the ball off the left wall \r\n if ball.get_position().get_x() < 2:\r\n ball.get_velocity().invert_x()\r\n # change direction of the ball off the ceiling \r\n if ball.get_position().get_y() < 2 :\r\n ball.get_velocity().invert_y()\r\n # ends the game if the paddle misses the ball \r\n if ball.get_position().get_y() == MAX_Y - 1:\r\n quit()", "def prep_robot_action(self):\n if self.robot.sensing:\n action_str = \"Action: Sensing...\"\n elif self.robot.moving_forward or self.robot.moving_backward:\n action_str = \"Action: Moving...\"\n else:\n action_str = \"Action: Target reached...\"\n # Prepare the image and positions it on the screen\n self.action_image = self.font.render(action_str, True, self.text_color, self.bg_color)\n self.action_rect = self.action_image.get_rect()\n self.action_rect.left = self.state_rect.left\n self.action_rect.top = self.state_rect.bottom + self.line_gap", "def update(self, timer, input):\n a = self.get_ship()\n if 
a.x<=GAME_WIDTH-SHIP_WIDTH-1 and \\\n input.is_key_down('right')==True:\n a.x+=self.get_speed()\n if a.x>SHIP_WIDTH and input.is_key_down('left')==True:\n a.x-=self.get_speed()\n press = 0\n self.MoveAliens(timer)\n if input.is_key_down('spacebar'):\n press = input.key_count\n pressed=(press!=0 and self._numkey==0)\n self.bolt_check()\n if pressed==True and self.get_plyrbolts()==0:\n self.get_bolts().append(Bolt(a.x, a.y))\n self.caller(press,timer)", "def move(self, bh: BaseBoundaryHandler) -> None:\n\n self.position += self.velocity\n self.position = bh(self.position)", "def get_action(self, state):\n\n \"\"\"\n XXX: DO NOT MODIFY THAT FUNCTION !!!\n Doing so will result in a 0 grade.\n \"\"\"\n\n # XXX : You shouldn't care on what is going on below.\n # Variables are specified in constructor.\n if self.beliefGhostStates is None:\n self.beliefGhostStates = state.getGhostBeliefStates()\n if self.walls is None:\n self.walls = state.getWalls()\n return self.updateAndGetBeliefStates(\n self._computeNoisyPositions(state))", "def _set_action(self, action):\n\n rospy.logdebug(\"Start Set Action ==>\"+str(action))\n # We convert the actions to speed movements to send to the parent class of Parrot\n linear_speed_vector = Vector3()\n angular_speed = 0.0\n\n if action == 0: # FORWARDS\n linear_speed_vector.x = self.linear_forward_speed\n self.last_action = \"FORWARDS\"\n elif action == 1: # BACKWARDS\n linear_speed_vector.x = -1*self.linear_forward_speed\n self.last_action = \"BACKWARDS\"\n elif action == 2: # STRAFE_LEFT\n linear_speed_vector.y = self.linear_forward_speed\n self.last_action = \"STRAFE_LEFT\"\n elif action == 3: # STRAFE_RIGHT\n linear_speed_vector.y = -1*self.linear_forward_speed\n self.last_action = \"STRAFE_RIGHT\"\n elif action == 4: # UP\n linear_speed_vector.z = self.linear_forward_speed\n self.last_action = \"UP\"\n elif action == 5: # DOWN\n linear_speed_vector.z = -1*self.linear_forward_speed\n self.last_action = \"DOWN\"\n\n # We tell drone the linear and angular speed to set to execute\n self.move_base(linear_speed_vector,\n angular_speed,\n epsilon=0.05,\n update_rate=10)\n\n rospy.logdebug(\"END Set Action ==>\"+str(action))", "def bouncing(self):\n x = random.randint(-250, 250) # where the ball will bounce on the X axis\n left_x = -850\n right_x = 850\n rand_y = random.randint(-350, 350) # random height where the ball goes\n floor = -350 # bouncing floor\n\n if self.xcor() > 300:\n self.goto(x, floor)\n self.goto(left_x, rand_y)\n elif self.xcor() < -300:\n self.goto(x, floor)\n self.goto(right_x, rand_y)", "def joy_callback(self, msg):\n mappings = gamepad_mappings.set_gamepad_mappings(msg)\n self.move_vertical = mappings[\"button_vertical\"] # up: +1.0, down: -1.0\n self.move_horizontal = mappings[\"button_horizontal\"] # left: +1.0, right: -1.0", "def keyup(key):\r\n \r\n global paddle1_vel, paddle2_vel\r\n \r\n if key == simplegui.KEY_MAP[\"w\"]:\r\n paddle1_vel = 0\r\n elif key == simplegui.KEY_MAP[\"s\"]:\r\n paddle1_vel = 0\r\n\r\n if key == simplegui.KEY_MAP[\"up\"]:\r\n paddle2_vel = 0\r\n elif key == simplegui.KEY_MAP[\"down\"]:\r\n paddle2_vel = 0" ]
[ "0.7522475", "0.6744767", "0.6475455", "0.63246346", "0.6307841", "0.63031274", "0.6293542", "0.6239462", "0.6162285", "0.6160075", "0.6146712", "0.6122981", "0.60952175", "0.6076362", "0.60531354", "0.60355747", "0.6020236", "0.5992324", "0.5984819", "0.5976522", "0.59671056", "0.5965766", "0.59569", "0.59323955", "0.59174424", "0.59173375", "0.58995175", "0.58812284", "0.58639646", "0.586359", "0.5820582", "0.5813605", "0.580474", "0.580474", "0.57874167", "0.5776327", "0.57746124", "0.5769196", "0.5761911", "0.57477105", "0.57423353", "0.5726553", "0.5725652", "0.5700009", "0.56956196", "0.56902605", "0.56846344", "0.5683509", "0.5683509", "0.56638557", "0.5655362", "0.5653619", "0.5647782", "0.5644197", "0.5634731", "0.5634386", "0.5632811", "0.5632541", "0.5624854", "0.5617039", "0.5615198", "0.5608732", "0.5606501", "0.55909854", "0.5590016", "0.55877495", "0.5584443", "0.5583584", "0.55746204", "0.5572257", "0.5567476", "0.55671316", "0.55574", "0.55558914", "0.5555336", "0.55541414", "0.5554007", "0.5544156", "0.5526189", "0.552346", "0.551607", "0.55143297", "0.5510861", "0.55086976", "0.55048144", "0.5500489", "0.5478933", "0.54582703", "0.5455704", "0.5447381", "0.5439749", "0.5435699", "0.54329425", "0.54138255", "0.54125667", "0.5406087", "0.5401073", "0.53993285", "0.5395192", "0.53763735" ]
0.6945269
1
Generates a credentials object for the current environment.
def get_credentials():
    credentials, _project_id = google.auth.default(scopes=SCOPES)
    # Credentials from the GCloud SDK, for example, do not implement Signing.
    assert isinstance(credentials, google.auth.credentials.Signing), \
        "Unsupported credential kind; credentials must implement Signing"
    return credentials
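A minimal usage sketch of the snippet above, assuming Application Default Credentials backed by a service-account key (gcloud user credentials do not implement Signing); the SCOPES value here is illustrative, not taken from the record.

import google.auth
import google.auth.credentials

SCOPES = ["https://www.googleapis.com/auth/cloud-platform"]  # assumed scope

credentials, _project_id = google.auth.default(scopes=SCOPES)
assert isinstance(credentials, google.auth.credentials.Signing), \
    "Unsupported credential kind; credentials must implement Signing"

# The Signing interface guarantees sign_bytes() and signer_email, which is
# what downstream code (for example, signed-URL generation) relies on.
signature = credentials.sign_bytes(b"payload-to-sign")
print(credentials.signer_email, len(signature))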
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def credentials():\n\n username = os.environ.get('OS_USERNAME')\n password = os.environ.get('OS_PASSWORD')\n tenant_name = (os.environ.get('OS_TENANT_NAME') or\n os.environ.get('OS_PROJECT_NAME'))\n auth_url = os.environ.get('OS_AUTH_URL')\n\n config = configparser.RawConfigParser()\n if config.read(_CREDS_FILE):\n username = username or config.get('admin', 'user')\n password = password or config.get('admin', 'pass')\n tenant_name = tenant_name or config.get('admin', 'tenant')\n auth_url = auth_url or config.get('auth', 'uri')\n\n return {\n 'username': username,\n 'password': password,\n 'tenant_name': tenant_name,\n 'uri': auth_url\n }", "def create_credentials():\r\n creds = None\r\n # The file token.pickle stores the user's access and refresh tokens, and is\r\n # created automatically when the authorization flow completes for the first\r\n # time.\r\n if os.path.exists('token.pickle'):\r\n with open('token.pickle', 'rb') as token:\r\n creds = pickle.load(token)\r\n # If there are no (valid) credentials available, let the user log in.\r\n if not creds or not creds.valid:\r\n if creds and creds.expired and creds.refresh_token:\r\n creds.refresh(Request())\r\n else:\r\n flow = InstalledAppFlow.from_client_secrets_file(\r\n 'client_secret.json', SCOPES)\r\n creds = flow.run_local_server()\r\n # Save the credentials for the next run\r\n with open('token.pickle', 'wb') as token:\r\n pickle.dump(creds, token)\r\n return creds", "def get_credentials(self):\n home_dir = os.path.expanduser(\"~\")\n credential_dir = os.path.join(home_dir, \".credentials\")\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, \"autoto.json\")\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store, self.auth_flags)\n print(\"Storing credentials to \" + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir 
= os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials(self):\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir, self.CRED_FILENAME)\r\n \r\n store = Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(self.CLIENT_SECRET_FILE, self.SCOPES)\r\n flow.user_agent = self.APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser(os.getcwd())\n credential_dir = os.path.join(home_dir, '.credentials')\n print(credential_dir)\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n # normal, sane way of doing this that really shouldn't be changed\n #home_dir = os.path.expanduser('~')\n #credential_dir = os.path.join(home_dir, '.credentials')\n #if not os.path.exists(credential_dir):\n # os.makedirs(credential_dir)\n #credential_path = os.path.join(credential_dir,'calendar-python-quickstart.json')\n\n # stupid hacky way that I came up with to fix an issue with running this app as root\n credential_path = os.path.join('./credentials','calendar-python-quickstart.json') \n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or 
credentials.invalid:\n flow = client.flow_from_clientsecrets(config['client secret file'], SCOPES)\n flow.user_agent = APPLICATION_NAME\n if args:\n credentials = tools.run_flow(flow, store, args)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'credentialv_modify.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'thejam_calendar.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'grader.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store, tools.argparser.parse_args(args=[]))\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir,\r\n 'bis-python-quickstart.json')\r\n\r\n store = oauth2client.file.Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\r\n flow.user_agent = APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials", "def get_credentials(self):\r\n \r\n try:\r\n import argparse\r\n #flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()\r\n if self.noauth == True:\r\n flags = tools.argparser.parse_args(args=['--noauth_local_webserver'])\r\n else:\r\n flags = tools.argparser.parse_args(args=[])\r\n except ImportError:\r\n flags = None \r\n \r\n 
home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir,'sheets.googleapis.com-allstarbot.json')\r\n\r\n store = Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n secret = Path(self.CLIENT_SECRET_FILE)\r\n if secret.exists():\r\n flow = client.flow_from_clientsecrets(self.CLIENT_SECRET_FILE, self.SCOPES)\r\n else:\r\n print(\"client_secret.json not found, using env vars\")\r\n if not os.environ.get('client_id') or not os.environ.get('client_secret'): \r\n print(\"env vars client_id and client_secret not found. canceling\")\r\n raise Exception(\"client secret error\")\r\n else:\r\n flow = OAuth2WebServerFlow(\r\n os.environ.get('client_id'),\r\n os.environ.get('client_secret'),\r\n self.SCOPES) \r\n \r\n flow.params['access_type'] = 'offline'\r\n flow.user_agent = self.APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials", "def get_credentials(env=\"development\") -> dict:\n load_dotenv()\n credentials = {}\n\n credentials[\"AWS_ACCESS_KEY_ID\"] = os.getenv(\"DEV_AWS_ACCESS_KEY_ID\")\n credentials[\"AWS_SECRET_ACCESS_KEY\"] = os.getenv(\n \"DEV_AWS_SECRET_ACCESS_KEY\")\n credentials[\"AWS_REGION\"] = os.getenv(\"DEV_AWS_REGION\")\n\n if env == \"production\":\n credentials[\"AWS_ACCESS_KEY_ID\"] = os.getenv(\"PROD_AWS_ACCESS_KEY_ID\")\n credentials[\"AWS_SECRET_ACCESS_KEY\"] = os.getenv(\n \"PROD_AWS_SECRET_ACCESS_KEY\")\n credentials[\"AWS_REGION\"] = os.getenv(\"PROD_AWS_REGION\")\n\n return credentials", "def get_credentials():\n store = Storage(CREDENTIAL_PATH)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store, None)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'reseller-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir,\r\n 'calendar-python-quickstart.json')\r\n\r\n store = oauth2client.file.Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\r\n flow.user_agent = APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility 
with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials", "def get_appengine_credentials():\n return get_credentials()", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or 
credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n credential_dir = os.path.realpath('.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path) # stores the users credentials --> TODO: put in database\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n\n credentials = tools.run_flow(flow, store, flags)\n\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir,'drive-python-quickstart.json')\r\n\r\n store = Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\r\n flow.user_agent = APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'clockwise.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatability with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_creds():\n\tcredentials = None\n\tif os.path.exists('token.pickle'):\n\t\twith open('token.pickle', 'rb') as token:\n\t\t\tcredentials = pickle.load(token)\n\t# If there are no (valid) credentials available, let the user log in.\n\tif not credentials or not credentials.valid:\n\t\tif credentials and credentials.expired and credentials.refresh_token:\n\t\t\tcredentials.refresh(Request())\n\t\telse:\n\t\t\tflow = InstalledAppFlow.from_client_secrets_file('config/sa.json', SCOPES)\n\t\t\tcredentials = flow.run_local_server(port=0)\n\t\t# Save the credentials for the next run\n\t\twith open('token.pickle', 'wb') as token:\n\t\t\tpickle.dump(credentials, token)\n\treturn credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'sally.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or 
credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'appsactivity-python-showtime.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n print('Storing credentials to ' + credential_path)\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def _get_credentials(self):\n if self.config_file:\n with open(self.config_file) as f:\n config_str = f.read()\n credentials_dict = json.loads(config_str)\n self.credentials = credentials_dict[self.account][self.auth_type]\n else:\n self.credentials = {\n \"account\": os.environ.get('SNOWSQL_ACCOUNT'),\n \"user\": os.environ.get('SNOWSQL_USER'),\n \"password\": os.environ.get('SNOWSQL_PWD')\n }", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'drive-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'drive-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n store = Storage(CLIENT_CREDENTIALS_FILE)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + CLIENT_CREDENTIALS_FILE)\n return credentials", "def get_credentials(self):\n return PlainCredentials(self.user_name, 
self.password)", "def new_credentials(site_name, user_name, password):\n new_credentials = Credentials(site_name, user_name, password)\n return new_credentials", "def get_credentials() -> client.Credentials:\n\n credential_path = os.path.join(HOME_DIR, \"google-credentials.json\")\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(os.path.join(HOME_DIR, CLIENT_SECRET_FILE), SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n # This attempts to open an authorization page in the default web browser, and asks the user\n # to grant the bot access to their data. If the user grants permission, the run_flow()\n # function returns new credentials.\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print(\"Storing credentials to \" + credential_path)", "def get_credentials():\n try:\n import argparse\n flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()\n except ImportError:\n flags = None\n\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'appsactivity-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(GoogleGsuiteAPI.CLIENT_SECRET_FILE, GoogleGsuiteAPI.SCOPES)\n flow.user_agent = GoogleGsuiteAPI.APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def set_credentials():", "def __create_credentials(self, scopes):\n creds = None\n\n # The File token.pickle stores the user's access and refresh tokens,\n # and is created automatically whne the flow compleates for the first time.\n if os.path.exists('token.pickle'):\n with open('token.pickle', 'rb') as token:\n creds = pickle.load(token)\n # if there are no valid credentials available, let the user log in\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file(\n 'credentials.json', scopes)\n creds = flow.run_local_server(port=0)\n # save the creds for the next run\n with open('token.pickle', 'wb') as token:\n pickle.dump(creds, token)\n\n return creds", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'admin-directory_v1-NestedGroupSync.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatability with Python 2.6\n credentials = tools.run(flow, store)\n print 'Storing credentials to' + credential_path\n return credentials", "def get_creds():\n creds = None\n # The file token.pickle stores the user's access and refresh tokens, and is\n # created automatically when the 
authorization flow completes for the first\n # time.\n if os.path.exists('inputs/token.pickle'):\n with open('inputs/token.pickle', 'rb') as token:\n creds = pickle.load(token)\n # If there are no (valid) credentials available, let the user log in.\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file(\n 'inputs/credentials.json', SCOPES)\n creds = flow.run_local_server(port=0)\n # Save the credentials for the next run\n with open('inputs/token.pickle', 'wb') as token:\n pickle.dump(creds, token)\n return creds", "def credentials(self):\n return CurrentProject().config.credentials[self.key]", "def _get_credentials(self):\n\n scopes = 'https://www.googleapis.com/auth/drive'\n client_secret_file = '%s/config/client_secret.json' % PROJECT_DIR\n application_name = 'Drive API Quickstart'\n\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n\n credential_path = os.path.join(credential_dir, 'drive-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(client_secret_file, scopes)\n flow.user_agent = application_name\n\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatability with Python 2.6\n credentials = tools.run(flow, store)\n\n return credentials", "def newcred(self):\n return {'login': input('username: '),\n 'password': getpass.getpass()}", "def get_credentials():\n credential_dir = os.getcwd()\n credential_path = os.path.join(credential_dir,\n 'smarking_error_check.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n #home_dir = os.path.expanduser('~')\n home_dir = os.path.expanduser('/home/pi/')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'gmail-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def auth(secrets: Dict) -> ClientSecretCredential:\n\n try:\n credential = ClientSecretCredential(\n tenant_id=secrets.get('tenant_id'),\n client_id=secrets.get('client_id'),\n client_secret=secrets.get('client_secret'),\n authority=urlparse(secrets.get('cloud').endpoints.active_directory).hostname\n )\n except ValueError as e:\n raise InterruptExecution(str(e))\n yield credential", "def get_credentials(self):\n if getattr(self, 'credentials', None):\n return self.credentials\n\n scopes = 
settings.SCOPES\n client_secret_file = settings.CLIENT_SECRET_FILE\n application_name = 'Google Sheets API Python Quickstart'\n\n home_dir = os.path.expanduser(settings.CREDENTIALS_DIRECTORY)\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'sheets.googleapis.com-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(client_secret_file, scopes)\n flow.user_agent = application_name\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n # print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n return ServiceAccountCredentials.from_json_keyfile_dict(SERVICE_ACCOUNT, scopes = SCOPES)", "def get_credentials(self):\n home_dir = os.path.expanduser('~')\n # credential_dir = os.path.join(home_dir, '.credentials')\n credential_dir = '.credentials'\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'drive-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(GoogleDocsConverter.CLIENT_SECRET_FILE, GoogleDocsConverter.SCOPES)\n flow.user_agent = GoogleDocsConverter.APPLICATION_NAME\n if self.flags:\n credentials = tools.run_flow(flow, store, self.flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def google_creds() -> object:\n # If modifying these scopes, delete the file token.pickle.\n SCOPES = ['https://www.googleapis.com/auth/drive.file',\n 'https://www.googleapis.com/auth/drive',\n 'https://www.googleapis.com/auth/drive.activity',\n 'https://www.googleapis.com/auth/spreadsheets'\n ]\n\n creds = None\n # The file token.pickle stores the user's access and refresh tokens, and is\n # created automatically when the authorization flow completes for the first\n # time.\n if os.path.exists('token.pickle'):\n with open('token.pickle', 'rb') as token:\n creds = pickle.load(token)\n # If there are no (valid) credentials available, let the user log in.\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file(\n 'credentials.json', SCOPES)\n creds = flow.run_local_server(port=0)\n # Save the credentials for the next run\n with open('token.pickle', 'wb') as token:\n pickle.dump(creds, token)\n\n return creds", "def make_instance(self, include_optional):\n # model = openapi_client.models.cred_credential.CredCredential() # noqa: E501\n if include_optional :\n return CredCredential(\n id = '', \n account_guid = '', \n account_id = '', \n api_token = openapi_client.models.common/secret.common.Secret(\n encrypted = '', \n plain = '', ), \n ca_cert = '', \n description = '', \n external = True, \n last_modified = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), \n owner = '', \n role_arn = '', \n secret = openapi_client.models.common/secret.common.Secret(\n encrypted = '', \n plain = '', ), \n tokens = openapi_client.models.cred/temporary_token.cred.TemporaryToken(\n 
aws_access_key_id = '', \n aws_secret_access_key = openapi_client.models.common/secret.common.Secret(\n encrypted = '', \n plain = '', ), \n duration = 56, \n expiration_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), \n token = openapi_client.models.common/secret.common.Secret(\n encrypted = '', \n plain = '', ), ), \n type = '[\\\"aws\\\",\\\"azure\\\",\\\"gcp\\\",\\\"ibmCloud\\\",\\\"apiToken\\\",\\\"githubToken\\\",\\\"basic\\\",\\\"dtr\\\",\\\"kubeconfig\\\",\\\"certificate\\\"]', \n use_aws_role = True\n )\n else :\n return CredCredential(\n )", "def create_new_credential(account,userName,password):\n new_credential = Credentials(account,userName,password)\n return new_credential", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'client_secret_OCR.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n print(\"Current folder: \" + os.getcwd())\n flow = client.flow_from_clientsecrets(\n \"../../\" + CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials(self):\n return self.credentials", "def get_credentials():\n # Get the credential\n if os.path.exists(os.getenv(\"GCP_AUTOMATION_CONFIG\")):\n credential_location = os.getenv(\"GCP_AUTOMATION_CONFIG\")\n with open(credential_location) as f:\n credential_location = json.load(f)\n credential = credential_location['Config'][0]['Authentication']\n log.info(f\"Retrieved credentail location as {credential}\")\n else:\n raise ValueError(\"Error in get_credentials function when calling 'GCP_AUTOMATION_CONFIG'\")\n\n # Construct the credentials request\n try:\n # Turn provided string into a filepath\n credentials = service_account.Credentials.from_service_account_file(\n filename=credential,\n scopes=[\"https://www.googleapis.com/auth/cloud-platform\"],\n )\n log.info(\"Credentials object constructed from service account file\")\n return credentials\n except Exception as e:\n return e", "def _obtain_web_app_creds(self) -> Credentials:\n info = {\n \"client_id\": self._raw_credentials.get(\"client_id\"),\n \"client_secret\": self._raw_credentials.get(\"client_secret\"),\n \"refresh_token\": self._raw_credentials.get(\"refresh_token\"),\n }\n creds = Credentials.from_authorized_user_info(info)\n if creds.expired:\n creds.refresh(Request())\n self._creds = creds", "def get_credentials( flags=None ):\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'drive-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return 
credentials", "def get_creds(\n config: Config=default):\n config_path = config.credential_path\n scopes = config.scopes\n\n logger.info('loading token')\n logger.debug(f'config_path: {config_path}')\n config_path = Path(config_path).expanduser()\n store = file.Storage(config_path/'token.json')\n creds = store.get()\n\n if not creds or creds.invalid:\n # Ask the user to give the correct permissions.\n logger.info('loading credentials')\n flow = client.flow_from_clientsecrets(\n config_path/'client_id.json',\n scopes)\n\n arguments = sys.argv\n sys.argv = sys.argv[0:1]\n # This line is why we need to remove the arguments from sys.argv\n # If you find a better way to get it to work, i'm buying it\n creds = tools.run_flow(flow, store)\n sys.argv = arguments\n\n return creds", "def get_credentials(self):\n credential_path = 'annette/data/gmail-credentials.json'\n\n store = Storage(credential_path)\n credentials = store.get()\n flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()\n\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(self.client_secret_file, self.scopes)\n flow.user_agent = self.application_name\n credentials = tools.run_flow(flow, store, flags)\n _utils.logger.debug('Storing credentials to ' + credential_path)\n\n http = credentials.authorize(httplib2.Http())\n service = discovery.build('gmail', 'v1', http=http, cache_discovery=False)\n return service", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing 
credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def credentials(self) -> Mapping:", "def _get_credentials(flags):\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'drive-python-visualizerhelptext.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'sheets.googleapis.com-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(resource_path(CLIENT_SECRET_FILE), SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(\n credential_dir, 'sheets.googleapis.com-python-quickstart.json'\n )\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n 
credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def GetCredentials(self):\n return self._session.get(_CREDENTIAL_KEY, credentials.MapdCredentials())", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'fb-drive.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def generate_client_credentials(confidential):\n client_id = random_str(40)\n client_secret = None\n hashed_secret = None\n if confidential:\n client_secret = random_str(55)\n hashed_secret = bcrypt.hashpw(\n client_secret.encode(\"utf-8\"), bcrypt.gensalt()\n ).decode(\"utf-8\")\n return client_id, client_secret, hashed_secret", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'sheets.googleapis.com-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'sheets.googleapis.com-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'sheets.googleapis.com-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n 
return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'sheets.googleapis.com-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def _obtain_service_account_creds(self) -> service_account.Credentials:\n credentials_json = self._raw_credentials.get(\"credentials_json\")\n admin_email = self._raw_credentials.get(\"email\")\n account_info = self._load_account_info(credentials_json)\n creds = service_account.Credentials.from_service_account_info(account_info, scopes=SCOPES)\n self._creds = creds.with_subject(admin_email)", "def get_credentials(self):\n #### DONT EDIT.\n SCOPES = ['https://spreadsheets.google.com/feeds',\n 'https://www.googleapis.com/auth/drive']\n CLIENT_SECRET_FILE = 'client_secret.json'\n APPLICATION_NAME = 'reporter'\n ####\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'reporter_creds.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store)\n return credentials", "def get_credentials():\n credentials_path = os.path.join(CREDENTIALS_DIR, CREDENTIALS_FILE)\n store = oauth2client.file.Storage(credentials_path)\n credentials = store.locked_get()\n\n if not credentials or credentials.invalid:\n client_secret_path = os.path.join(CREDENTIAL_DIR, CLIENT_SECRET_FILE)\n flow = client.flow_from_clientsecrets(client_secret_path, \n scope='https://www.googleapis.com/auth/admin.directory.resource.calendar',\n redirect_uri='urn:ietf:wg:oauth:2.0:oob')\n\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n\n print(\"Storing credentials to: \" + credentials_path)\n\n\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'sheets.googleapis.com-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'google-photos-stats.json')\n\n store = 
Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store, flags)\n print('Storing credentials to ' + credential_path)\n return credentials", "def getCredentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir,\r\n 'gmail-python-spam-filter.json')\r\n\r\n store = Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\r\n flow.user_agent = APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(args.clientSecretFile, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if args:\n credentials = tools.run_flow(flow, store, args)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = './ignore' #os.path.expanduser('./')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = './ignore' #os.path.expanduser('./')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 
'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def _GetCredentials():\n return service_account.Credentials.from_service_account_file(\n KEY_FILE, scopes=_SCOPES)", "def create_user_credentials(storage_type, storage_id, space_name, client_ip,\n user_details):\n user_id = user_details[\"id\"]\n if user_id == \"0\":\n return PosixCredentials(0, 0)\n\n uid = gid = gen_storage_id(user_id)\n return PosixCredentials(uid, gid)", "def get_credentials(self, **kwargs):\n creds_file = os.path.join(kwargs['user_dir'], 'credentials.json')\n\n # Getting credentials from Storage\n store = file.Storage(creds_file)\n creds = store.get()\n\n # Validating or refreshing credentials, if necessary\n if creds is None or creds.invalid:\n flow = client.flow_from_clientsecrets(self.client_secret_file,\n self.scopes)\n creds = tools.run_flow(flow, store)\n elif creds.access_token_expired:\n creds.refresh(httplib2.Http())\n else:\n pass\n\n return creds", "def get_creds():\n with open(CREDS_PATH, 'r') as creds_file:\n creds = json.load(creds_file)\n return creds['uname'], creds['pword']", "def get_credentials(self):\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'homework_logger-gmail-api.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(self.CLIENT_SECRET_FILE, self.SCOPES)\n flow.user_agent = self.APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def authorize_credentials():\n credentials = STORAGE.get()\n # If the credentials doesn't exist in the storage location then run the flow\n if credentials is None or credentials.invalid:\n flow = flow_from_clientsecrets(CREDENTIAL_JSON, scope=SCOPE)\n http = httplib2.Http()\n credentials = run_flow(flow, STORAGE, http=http)\n return credentials", "def getCredentials(scopes):\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,'admin-directory_v1-python-quickstart.json')\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, scopes)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def password_builder():\n password = Credentials.password_buidler()\n return password", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = 
os.path.join(credential_dir,\n 'sheets.googleapis.com-python.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n credential_dir = os.path.dirname(os.path.realpath(CLIENT_SECRET_FILE))\n credential_path = os.path.join(\n credential_dir, 'sheets.googleapis.com-endosys-events.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials(account):\n credential_dir = os.path.join(HOME_DIR, META_DIR, account, \"credentials\")\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'pyDrive.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def credentials(self):\n if self._credentials is None:\n all_credentials = self.registry_client.registries.list_credentials(\n self.resources.group.name,\n self.registry.name,\n )\n first_password = next(iter(all_credentials.passwords)).value\n self._credentials = LoginCredentials(\n all_credentials.username,\n first_password,\n )\n return self._credentials" ]
[ "0.7112774", "0.7041544", "0.7016884", "0.68389493", "0.68389493", "0.6838073", "0.683158", "0.68242747", "0.6818594", "0.6787863", "0.6783995", "0.67665184", "0.67437685", "0.66767615", "0.6668108", "0.6659616", "0.66587085", "0.66504437", "0.6628971", "0.66179526", "0.6613303", "0.6613303", "0.6613303", "0.6613303", "0.6613303", "0.6613208", "0.6605504", "0.6600014", "0.6598708", "0.6596074", "0.6589082", "0.6533473", "0.65313655", "0.65313655", "0.6507005", "0.6477162", "0.6448734", "0.6435991", "0.6423615", "0.6389826", "0.6378326", "0.63726616", "0.634594", "0.6337268", "0.6319214", "0.6315225", "0.6299059", "0.6286805", "0.6284753", "0.6278469", "0.6278154", "0.627351", "0.62677276", "0.626058", "0.6247816", "0.6246154", "0.62416714", "0.62357956", "0.62294", "0.6221649", "0.6215473", "0.6215003", "0.62144685", "0.62127304", "0.62127304", "0.62127304", "0.62127304", "0.6211406", "0.6196364", "0.6188885", "0.6185453", "0.6180612", "0.6179644", "0.61783016", "0.6176725", "0.6176725", "0.6176725", "0.6176725", "0.61762273", "0.6171758", "0.61648047", "0.61599636", "0.6156031", "0.6150844", "0.6148743", "0.6143441", "0.6134762", "0.6134762", "0.6134419", "0.61342305", "0.61313957", "0.6124548", "0.6117851", "0.6116317", "0.61119986", "0.610732", "0.6099029", "0.6098688", "0.60942864", "0.60879797" ]
0.6263718
53
Deprecated. Alias for `get_credentials()`.
def get_appengine_credentials(): return get_credentials()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_credentials(self):\n return self.credentials", "def GetUserCredentials():\n email = options.email\n if email is None:\n email = GetEmail(\"Email (login for uploading to %s)\" % options.server)\n password = getpass.getpass(\"Password for %s: \" % email)\n return (email, password)", "def get_credentials(self):\n return PlainCredentials(self.user_name, self.password)", "def get_credentials_format(cls):\n raise NotImplementedError", "def _config_credentials_get():\n user = input(\"username:\")\n password = getpass.getpass()\n url = input(\"url:\")\n return user, password, url", "def get_credentials():\n username = input(\"Username: \")\n password = getpass.getpass(prompt='Password: ')\n return username, password", "def get_credentials():\n store = Storage(CREDENTIAL_PATH)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store, None)\n return credentials", "def GetCredentials(self):\n return self._session.get(_CREDENTIAL_KEY, credentials.MapdCredentials())", "def get_auth(self, username, password):\n raise NotImplementedError()", "def get_creds():\n\tcredentials = None\n\tif os.path.exists('token.pickle'):\n\t\twith open('token.pickle', 'rb') as token:\n\t\t\tcredentials = pickle.load(token)\n\t# If there are no (valid) credentials available, let the user log in.\n\tif not credentials or not credentials.valid:\n\t\tif credentials and credentials.expired and credentials.refresh_token:\n\t\t\tcredentials.refresh(Request())\n\t\telse:\n\t\t\tflow = InstalledAppFlow.from_client_secrets_file('config/sa.json', SCOPES)\n\t\t\tcredentials = flow.run_local_server(port=0)\n\t\t# Save the credentials for the next run\n\t\twith open('token.pickle', 'wb') as token:\n\t\t\tpickle.dump(credentials, token)\n\treturn credentials", "def _basic_auth_credentials(self) -> tuple[str, str] | None:\n return None", "def test_deprecation(self):\n self.getDeprecatedModuleAttribute(\n \"twisted.cred.credentials\",\n \"UsernameHashedPassword\",\n _uhpVersion,\n \"Use twisted.cred.credentials.UsernamePassword instead.\",\n )", "def credentials(self) -> Optional[pulumi.Input['CredentialsArgs']]:\n return pulumi.get(self, \"credentials\")", "def get_credentials(server: str) -> Tuple[str, int, str]:\n\n try:\n host, port, passwd = Credentials.from_string(server)\n except InvalidCredentials:\n try:\n host, port, passwd = CONFIG.servers[server]\n except KeyError:\n LOGGER.error('No such server: %s.', server)\n exit(2)\n\n if passwd is None:\n try:\n passwd = getpass('Password: ')\n except (KeyboardInterrupt, EOFError):\n print()\n LOGGER.error('Aborted by user.')\n exit(3)\n\n return (host, port, passwd)", "def get_credentials():\n store = Storage(CLIENT_CREDENTIALS_FILE)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + CLIENT_CREDENTIALS_FILE)\n return credentials", "def get_credentials(self, authenticator_id):\n pass", "def authenticate_credentials(self, **credentials):\n return None", "def credentials(self) -> Mapping:", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, 
'.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'credentialv_modify.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def set_credentials():", "def use_cred():\n prompt = \"Use Credentials? (N for Anonymous)\"\n return query_yes_no(question=prompt, default=\"no\")", "def get_credentials():\n credentials, _project_id = google.auth.default(scopes=SCOPES)\n\n # Credentials from the GCloud SDK, for example, do not implement Signing.\n assert isinstance(credentials, google.auth.credentials.Signing), \\\n \"Unsupported credential kind; credentials must implement Signing\"\n\n return credentials", "def get_credentials(service_name=\"dataforSeo\", uname=\"[email protected]\"):\n pw = keyring.get_password(service_name, uname)\n return [uname, pw]", "def new_credentials(site_name, user_name, password):\n new_credentials = Credentials(site_name, user_name, password)\n return new_credentials", "def get_credentials(self, oid=None):\n path = '/credentials'\n key = 'credentials'\n if oid is not None:\n path = '%s/%s' % (path, oid)\n key = 'credential'\n res = self.client.call(path, 'GET', data='', token=self.token)\n self.logger.debug('Get openstack credentials: %s' % truncate(res))\n try:\n return res[0][key]\n except:\n raise OpenstackError('No credentials found')", "def get_credentials(servise: str) -> google.oauth2.credentials.Credentials:\n\n # SQL query to get the credentials for the current user from servise credentials table\n query = f\"\"\"\n SELECT token, token_uri, client_id, refresh_token, client_secret, scopes\n FROM {servise}_credentials\n WHERE user_id=?;\n \"\"\"\n\n # Get the credentials\n with connect(DATABASE) as db:\n credentials = db.execute(query, (session[\"user_id\"],)).fetchone()\n\n # Return None if it doesn't exist it the database\n if not credentials: return None\n\n # Transfer the credentials to a dictionary\n credentials_dict = {\n \"token\": credentials[0],\n \"token_uri\": credentials[1],\n \"client_id\": credentials[2],\n \"refresh_token\": credentials[3],\n \"client_secret\": credentials[4],\n \"scopes\": None if credentials[5] is None else credentials[5].split(\" \")\n }\n\n # Return a google Credentials object\n return google.oauth2.credentials.Credentials(**credentials_dict)", "def getCredentials(self):\n if self.result(): # Accepted?\n username = self.username_le.text()\n password = \"\"\n if self.askpassword:\n password = self.password_le.text()\n\n return username, password\n\n raise CredentialDialogReject()", "def get_account_credentials(call):\n account = call.data.get(CONF_SPOTIFY_ACCOUNT)\n user = username\n pwd = password\n if account is not None:\n _LOGGER.debug('setting up with different account than default %s', account)\n user = accounts.get(account).get(CONF_USERNAME)\n pwd = accounts.get(account).get(CONF_PASSWORD)\n return user, pwd", "def get_credentials():\n credentials = tools.get_credentials_file()\n session_credentials = session.get_session_credentials()\n for credentials_key in credentials:\n\n # checking for not false, but truthy value here is the desired 
behavior\n session_value = session_credentials.get(credentials_key)\n if session_value is False or session_value:\n credentials[credentials_key] = session_value\n return credentials", "def credentials(self) -> HTTPBasicAuth:\n if self.user is None or self.password is None:\n return None\n else:\n return HTTPBasicAuth(self.user, self.password)", "def get_creds():\n creds = None\n # The file token.pickle stores the user's access and refresh tokens, and is\n # created automatically when the authorization flow completes for the first\n # time.\n if os.path.exists('inputs/token.pickle'):\n with open('inputs/token.pickle', 'rb') as token:\n creds = pickle.load(token)\n # If there are no (valid) credentials available, let the user log in.\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file(\n 'inputs/credentials.json', SCOPES)\n creds = flow.run_local_server(port=0)\n # Save the credentials for the next run\n with open('inputs/token.pickle', 'wb') as token:\n pickle.dump(creds, token)\n return creds", "def get_credentials():\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir,\r\n 'gmail-python-spam-filter.json')\r\n\r\n store = Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\r\n flow.user_agent = APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials", "def get_creds(self):\n return self.creds", "def list_credentials(self, **_params):\r\n return self.get(self.credentials_path, params=_params)", "def list_credentials(user):\n return Credentials.list_credentials(user)", "def _GetCredentials():\n return service_account.Credentials.from_service_account_file(\n KEY_FILE, scopes=_SCOPES)", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'grader.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store, tools.argparser.parse_args(args=[]))\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials(service, sandbox=True):\n srv = service.lower()\n srv_param = resolve_service(srv)\n if srv_param is None:\n return\n\n client_id, client_secret, scope, storage = srv_param\n if srv == 'evernote':\n return evernote_auth(client_id, client_secret, storage, sandbox)\n else:\n return google_auth(client_id, client_secret, scope, storage)", "def get_creds():\n with open(CREDS_PATH, 'r') as creds_file:\n creds = json.load(creds_file)\n return creds['uname'], creds['pword']", "def find_credential(account):\n return Credentials.find_by_username(account)", "def newcred(self):\n return {'login': input('username: '),\n 'password': getpass.getpass()}", "def get_credentials():\n\n 
home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'appsactivity-python-showtime.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n print('Storing credentials to ' + credential_path)\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def _get_credentials(flags):\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'drive-python-visualizerhelptext.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def getCredential(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def valid_credentials():\n if 'credentials' not in flask.session:\n return None\n\n credentials = client.OAuth2Credentials.from_json(\n flask.session['credentials'])\n\n if (credentials.invalid or\n credentials.access_token_expired):\n return None\n return credentials", "def valid_credentials():\n if 'credentials' not in flask.session:\n return None\n\n credentials = client.OAuth2Credentials.from_json(\n flask.session['credentials'])\n\n if (credentials.invalid or\n credentials.access_token_expired):\n return None\n return credentials", "def get_credentials():\n return ServiceAccountCredentials.from_json_keyfile_dict(SERVICE_ACCOUNT, scopes = SCOPES)", "def getcreds():\n global user\n global password\n if not user:\n user = input(\"Please enter your username:\\n\")\n if not password:\n password = getpass.getpass(\"Please enter password:\\n\")", "def get_credentials(self):\n try:\n with open(self.credentials_file, 'r') as fh_credentials:\n credentials_dict = json.loads(fh_credentials.read())\n return credentials_dict\n except IOError:\n self.reset_credentials()\n with open(self.credentials_file, 'r') as fh_credentials:\n return json.loads(fh_credentials.read())", "def display_credential():\n return CredentialsData.display_credentials()", "def get_credential(self, key):\n return self.creds.get(key, '')", "def valid_credentials():\n if 'credentials' not in flask.session:\n return None\n\n credentials = client.OAuth2Credentials.from_json(\n flask.session['credentials'])\n\n if (credentials.invalid or credentials.access_token_expired):\n return None\n return credentials", "def credentials(self) -> pulumi.Output[Optional['outputs.CredentialsResponse']]:\n return pulumi.get(self, \"credentials\")", "def auth(self):\n return self.creds(\"[email protected]\", cookie=\"USERTOKEN: authcookie\")", "def get_provider_credentials(provider):\n 
logging.info('Getting provider credentials for {}'.format(provider))\n uppercase_provider = provider.upper()\n username_variable = '{}_USERNAME'.format(uppercase_provider)\n authentication_variable = '{}_AUTHENTICATION'.format(uppercase_provider)\n username = os.environ.get(username_variable, '')\n authentication = os.environ[authentication_variable]\n return authentication, username", "def credentials_from_cmd(self):\n username = raw_input(\"Email:\")\n pw = getpass.getpass()\n return username, pw", "def credentials(self):\n return self._credentials", "def _get_credentials(self):\n if self.config_file:\n with open(self.config_file) as f:\n config_str = f.read()\n credentials_dict = json.loads(config_str)\n self.credentials = credentials_dict[self.account][self.auth_type]\n else:\n self.credentials = {\n \"account\": os.environ.get('SNOWSQL_ACCOUNT'),\n \"user\": os.environ.get('SNOWSQL_USER'),\n \"password\": os.environ.get('SNOWSQL_PWD')\n }", "def get_credentials(self):\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir, self.CRED_FILENAME)\r\n \r\n store = Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(self.CLIENT_SECRET_FILE, self.SCOPES)\r\n flow.user_agent = self.APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials", "def credentials(self):\n if self.user and self.is_authenticated():\n return AuthCredentials(['authenticated'] + self.user.permissions)\n else:\n return AuthCredentials()", "def get_credentials():\n\thome_dir = os.path.expanduser('~')\n\tcredential_dir = os.path.join(home_dir, '.credentials')\n\tif not os.path.exists(credential_dir):\n\t\tos.makedirs(credential_dir)\n\tcredential_path = os.path.join(credential_dir, \n\t\t\t\t\t\t\t\t\t'facebook_updater.json')\n\t\t\t\t\t\t\t\t\t\n\tstore = oauth2client.file.Storage(credential_path)\n\tcredentials = store.get()\n\tif not credentials or credentials.invalid:\n\t\tflow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n\t\tflow.user_agent = APPLICATION_NAME\n\t\tif flags:\n\t\t\tcredentials = tools.run_flow(flow, store, flags)\n\t\tprint ('Storing credentials to ' + credential_path)\n\treturn credentials", "def get_credentials(self, **kwargs):\n creds_file = os.path.join(kwargs['user_dir'], 'credentials.json')\n\n # Getting credentials from Storage\n store = file.Storage(creds_file)\n creds = store.get()\n\n # Validating or refreshing credentials, if necessary\n if creds is None or creds.invalid:\n flow = client.flow_from_clientsecrets(self.client_secret_file,\n self.scopes)\n creds = tools.run_flow(flow, store)\n elif creds.access_token_expired:\n creds.refresh(httplib2.Http())\n else:\n pass\n\n return creds", "def auth_credentials(self) -> Optional[Sequence['outputs.AuthCredentialResponse']]:\n return pulumi.get(self, \"auth_credentials\")", "def GetUserCredentials(self):\r\n # Create a local alias to the email variable to avoid Python's crazy\r\n # scoping rules.\r\n global keyring\r\n email = self.email\r\n if email is None:\r\n email = GetEmail(\"Email (login for uploading to %s)\" % self.server)\r\n password = None\r\n if keyring and not 
email in self.accounts_seen:\r\n try:\r\n password = keyring.get_password(self.host, email)\r\n except:\r\n # Sadly, we have to trap all errors here as\r\n # gnomekeyring.IOError inherits from object. :/\r\n print \"Failed to get password from keyring\"\r\n keyring = None\r\n if password is not None:\r\n print \"Using password from system keyring.\"\r\n self.accounts_seen.add(email)\r\n else:\r\n password = getpass.getpass(\"Password for %s: \" % email)\r\n if keyring:\r\n answer = raw_input(\"Store password in system keyring?(y/N) \").strip()\r\n if answer == \"y\":\r\n keyring.set_password(self.host, email, password)\r\n self.accounts_seen.add(email)\r\n return (email, password)", "def GetAccountNameAndPassword(credential,\n credentials_path=DEFAULT_CREDENTIAL_PATH):\n if (credentials_path == DEFAULT_CREDENTIAL_PATH and not\n os.path.exists(DEFAULT_CREDENTIAL_PATH)):\n cloud_storage.GetIfChanged(\n DEFAULT_CREDENTIAL_PATH, DEFAULT_CREDENTIAL_BUCKET)\n\n with open(credentials_path, 'r') as f:\n credentials = json.load(f)\n c = credentials.get(credential)\n return c['username'], c['password']", "def credential(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"credential\")", "def _auth_via_token(self) -> Auth.contextmgr:\n warnings.warn(\n \"Authentication via personal access token is deprecated. \"\n \"Please, use the password authentication to avoid inconsistencies.\",\n AirflowProviderDeprecationWarning,\n )\n tableau_auth = PersonalAccessTokenAuth(\n token_name=self.conn.extra_dejson[\"token_name\"],\n personal_access_token=self.conn.extra_dejson[\"personal_access_token\"],\n site_id=self.site_id,\n )\n return self.server.auth.sign_in_with_personal_access_token(tableau_auth)", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials(self):\r\n \r\n try:\r\n import argparse\r\n #flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()\r\n if self.noauth == True:\r\n flags = tools.argparser.parse_args(args=['--noauth_local_webserver'])\r\n else:\r\n flags = tools.argparser.parse_args(args=[])\r\n except ImportError:\r\n flags = None \r\n \r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir,'sheets.googleapis.com-allstarbot.json')\r\n\r\n store = Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n secret = Path(self.CLIENT_SECRET_FILE)\r\n if secret.exists():\r\n flow = client.flow_from_clientsecrets(self.CLIENT_SECRET_FILE, self.SCOPES)\r\n else:\r\n print(\"client_secret.json not found, using env vars\")\r\n if not os.environ.get('client_id') or not os.environ.get('client_secret'): \r\n print(\"env vars client_id and client_secret not found. 
canceling\")\r\n raise Exception(\"client secret error\")\r\n else:\r\n flow = OAuth2WebServerFlow(\r\n os.environ.get('client_id'),\r\n os.environ.get('client_secret'),\r\n self.SCOPES) \r\n \r\n flow.params['access_type'] = 'offline'\r\n flow.user_agent = self.APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials", "def get_conn(self) -> Auth.contextmgr:\n if self.conn.login and self.conn.password:\n return self._auth_via_password()\n if \"token_name\" in self.conn.extra_dejson and \"personal_access_token\" in self.conn.extra_dejson:\n return self._auth_via_token()\n raise NotImplementedError(\"No Authentication method found for given Credentials!\")", "def get_credentials():\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir,'drive-python-quickstart.json')\r\n\r\n store = Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\r\n flow.user_agent = APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials", "def get_credentials():\n credentials_path = os.path.join(CREDENTIALS_DIR, CREDENTIALS_FILE)\n store = oauth2client.file.Storage(credentials_path)\n credentials = store.locked_get()\n\n if not credentials or credentials.invalid:\n client_secret_path = os.path.join(CREDENTIAL_DIR, CLIENT_SECRET_FILE)\n flow = client.flow_from_clientsecrets(client_secret_path, \n scope='https://www.googleapis.com/auth/admin.directory.resource.calendar',\n redirect_uri='urn:ietf:wg:oauth:2.0:oob')\n\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n\n print(\"Storing credentials to: \" + credentials_path)\n\n\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = 
APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir,\r\n 'bis-python-quickstart.json')\r\n\r\n store = oauth2client.file.Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\r\n flow.user_agent = APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials", "def datasource_auth_credentials(self) -> Optional[pulumi.Input['SecretStoreBasedAuthCredentialsArgs']]:\n return pulumi.get(self, \"datasource_auth_credentials\")", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(config['client secret file'], SCOPES)\n flow.user_agent = APPLICATION_NAME\n if args:\n credentials = tools.run_flow(flow, store, args)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n credential_dir = os.getcwd()\n credential_path = os.path.join(credential_dir,\n 'smarking_error_check.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials(options, environment):\n if options[\"--username\"] or options[\"--auth\"]:\n if not options[\"--username\"]:\n options[\"<username>\"] = lib.prompt(\n \"Please enter the username for %s...\" % environment\n )\n if not options[\"--password\"]:\n options[\"<password>\"] = lib.prompt(\n \"Please enter the password for %s...\" % environment, secret=True\n )\n return options", "def authenticate(credentials):", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'thejam_calendar.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = 
tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n #home_dir = os.path.expanduser('~')\n home_dir = os.path.expanduser('/home/pi/')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'gmail-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def authorize_credentials():\n credentials = STORAGE.get()\n # If the credentials doesn't exist in the storage location then run the flow\n if credentials is None or credentials.invalid:\n flow = flow_from_clientsecrets(CREDENTIAL_JSON, scope=SCOPE)\n http = httplib2.Http()\n credentials = run_flow(flow, STORAGE, http=http)\n return credentials", "def get_credentials():\n try:\n import argparse\n flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()\n except ImportError:\n flags = None\n\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'appsactivity-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(GoogleGsuiteAPI.CLIENT_SECRET_FILE, GoogleGsuiteAPI.SCOPES)\n flow.user_agent = GoogleGsuiteAPI.APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n\n SCOPES = 'https://www.googleapis.com/auth/gmail.readonly '\n\n while not os.path.exists(args.clientSecretFile):\n logging.fatal(\"Client secrets file does not exist: %s . 
You probably need to download this from the Google API console.\", args.clientSecretFile)\n sleep(10)\n\n credentials = None\n\n if os.path.exists(args.credentialsPath):\n credentials = Credentials.from_authorized_user_file(args.credentialsPath, SCOPES)\n\n if not credentials or not credentials.valid:\n flow = InstalledAppFlow.from_client_secrets_file(args.clientSecretFile, SCOPES)\n flow.user_agent = 'prometheus-gmail-exporter'\n\n credentials = flow.run_local_server(port=args.oauthBindPort, bind_addr = args.oauthBindAddr, host = args.oauthHost)\n #credentials = flow.run_local_server()\n\n logging.info(\"Storing credentials to %s\", args.credentialsPath)\n\n with open(args.credentialsPath, 'w', encoding='utf8') as token:\n token.write(credentials.to_json())\n\n\n return credentials", "def password_builder():\n password = Credentials.password_buidler()\n return password", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'sally.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def _get_creds_from_token(token):\n cred_string = base64.b64decode(token).decode(\"ascii\")\n username, password = str(cred_string).split(\":\")\n return username, password", "def get_access_credentials_output(backend: Optional[pulumi.Input[str]] = None,\n namespace: Optional[pulumi.Input[Optional[str]]] = None,\n role: Optional[pulumi.Input[str]] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetAccessCredentialsResult]:\n ...", "def create_credentials():\r\n creds = None\r\n # The file token.pickle stores the user's access and refresh tokens, and is\r\n # created automatically when the authorization flow completes for the first\r\n # time.\r\n if os.path.exists('token.pickle'):\r\n with open('token.pickle', 'rb') as token:\r\n creds = pickle.load(token)\r\n # If there are no (valid) credentials available, let the user log in.\r\n if not creds or not creds.valid:\r\n if creds and creds.expired and creds.refresh_token:\r\n creds.refresh(Request())\r\n else:\r\n flow = InstalledAppFlow.from_client_secrets_file(\r\n 'client_secret.json', SCOPES)\r\n creds = flow.run_local_server()\r\n # Save the credentials for the next run\r\n with open('token.pickle', 'wb') as token:\r\n pickle.dump(creds, token)\r\n return creds", "def find_credential(account):\n return Credentials.find_credential(account)", "def GetAuthToken(email, password):\n return AuthToken(email, password).GetAuthToken()", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'reseller-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = 
tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_jira_auth() -> Tuple[str, str]:\n jira_auth = json.loads(\n s3.get_object(\n Bucket=\"beckon-devops\", Key=\"credentials/beckon_credentials.json\"\n )[\"Body\"].read()\n ).get(\"jira\", {})\n\n return jira_auth.get(\"user\", \"\"), jira_auth.get(\"password\", \"\")", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'fb-drive.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def credentials(self):\n return CurrentProject().config.credentials[self.key]", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'google-photos-stats.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store, flags)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials() -> client.Credentials:\n\n credential_path = os.path.join(HOME_DIR, \"google-credentials.json\")\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(os.path.join(HOME_DIR, CLIENT_SECRET_FILE), SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n # This attempts to open an authorization page in the default web browser, and asks the user\n # to grant the bot access to their data. 
If the user grants permission, the run_flow()\n # function returns new credentials.\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print(\"Storing credentials to \" + credential_path)", "def get_creds(cred_fpath=None, api_path=None):\n if cred_fpath is not None:\n print(\"reading keys from credentials file\")\n keys = pd.read_csv(cred_fpath, sep=\"=\")\n myAccessKey = keys.loc['aws_access_key_id ']['[default]'].strip()\n mySecretKey = keys.loc['aws_secret_access_key ']['[default]'].strip()\n myToken = \"\"\n else:\n r = requests.get(api_path)\n creds = r.json()\n myAccessKey = creds[\"AccessKeyId\"]\n mySecretKey = creds[\"SecretAccessKey\"]\n myToken = creds[\"Token\"]\n return myAccessKey, mySecretKey, myToken", "def get_credentials():\n home_dir = os.path.expanduser(os.getcwd())\n credential_dir = os.path.join(home_dir, '.credentials')\n print(credential_dir)\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials" ]
[ "0.6579635", "0.6387039", "0.63844115", "0.63098377", "0.6289624", "0.6281468", "0.62561184", "0.62231666", "0.61097383", "0.60903406", "0.60723895", "0.60612416", "0.60548604", "0.60009325", "0.5988076", "0.5980738", "0.5970519", "0.59341145", "0.59061664", "0.5883298", "0.5865999", "0.5863041", "0.5850975", "0.58429414", "0.5840186", "0.58384866", "0.58274907", "0.5816954", "0.5775765", "0.5763158", "0.57583517", "0.57329106", "0.572642", "0.5713034", "0.5707974", "0.5707699", "0.5684216", "0.5683578", "0.56699365", "0.5665022", "0.56649345", "0.56620604", "0.5655294", "0.56445736", "0.5641219", "0.5641219", "0.5637422", "0.5635529", "0.5635511", "0.5632196", "0.5628019", "0.5618158", "0.56138283", "0.56087685", "0.5604494", "0.55910355", "0.5588242", "0.5579771", "0.5573222", "0.5565219", "0.5563679", "0.555967", "0.5550565", "0.55459285", "0.5541329", "0.5539673", "0.5535886", "0.55342937", "0.5534058", "0.5532888", "0.5529333", "0.55290484", "0.55290294", "0.55290294", "0.5528885", "0.5528784", "0.5528741", "0.55268717", "0.5521374", "0.55177", "0.55067915", "0.55064374", "0.55050874", "0.55050755", "0.55013293", "0.55009264", "0.54999816", "0.549409", "0.54899114", "0.5489373", "0.54811484", "0.54796666", "0.5476446", "0.5476072", "0.54758894", "0.5474003", "0.54693997", "0.54600596", "0.54577696", "0.54575783" ]
0.67769206
0
Generate a Credentials object from a key file.
def get_service_key_credentials(key_file_path): return service_account.Credentials.from_service_account_file( key_file_path, scopes=SCOPES, )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def from_file(cls, filename, **kwargs):\n return super(Credentials, cls).from_file(filename, **kwargs)", "def _GetCredentials():\n return service_account.Credentials.from_service_account_file(\n KEY_FILE, scopes=_SCOPES)", "def from_json_file(cls, filename: str):\n # Import standard modules\n from json import load\n\n with open(filename) as file_obeject:\n credentials = load(file_obeject)\n\n key = credentials.get('API_KEY')\n secret = credentials.get('API_SECRET')\n url = credentials.get('URL')\n\n if not all([key, secret]):\n err = (\n '`API_KEY` and `API_SECRET` are mandatory attributes.\\n'\n 'Please make sure they are contained in your `.json` file'\n )\n KeyError(err)\n\n return cls(key, secret, url)", "def get_credentials():\n return ServiceAccountCredentials.from_json_keyfile_dict(SERVICE_ACCOUNT, scopes = SCOPES)", "def from_service_account_file(cls, filename, *args, **kwargs):\n credentials = service_account.Credentials.from_service_account_file(filename)\n kwargs[\"credentials\"] = credentials\n return cls(*args, **kwargs)", "def from_service_account_file(cls, filename, *args, **kwargs):\n credentials = service_account.Credentials.from_service_account_file(filename)\n kwargs[\"credentials\"] = credentials\n return cls(*args, **kwargs)", "def from_service_account_file(cls, filename: str, *args, **kwargs):\n credentials = service_account.Credentials.from_service_account_file(filename)\n kwargs[\"credentials\"] = credentials\n return cls(*args, **kwargs)", "def from_service_account_file(cls, filename: str, *args, **kwargs):\n credentials = service_account.Credentials.from_service_account_file(filename)\n kwargs[\"credentials\"] = credentials\n return cls(*args, **kwargs)", "def from_service_account_file(cls, filename: str, *args, **kwargs):\n credentials = service_account.Credentials.from_service_account_file(filename)\n kwargs[\"credentials\"] = credentials\n return cls(*args, **kwargs)", "def get_credentials(key):\n with open(\"credentials.json\", \"r\") as credentials_file:\n credentials_data = json.load(credentials_file)\n\n try:\n return credentials_data[key]\n except KeyError:\n raise KeyError(f\"Credential {key} was not found in file.\")", "def _load_credentials(creds_file=None):\n\n creds = None\n\n # Validate the credentials file\n if not creds_file:\n creds_file = 'credentials.json'\n if not os.path.exists(creds_file):\n creds_file = os.path.join(expanduser('~'), 'credentials.json')\n if not os.path.exists(creds_file):\n raise SystemExit('Could not find a credentials.json file. 
' \\\n 'Either pass one as argument or make sure credentials.json exists in ' \\\n 'the current directory or ' + expanduser('~'))\n\n # Creates CACHE_DIR if it does not exist\n # mode 0x777 (the default) is used because the system's umask value is masked out first\n if not os.path.exists(CACHE_DIR):\n os.mkdir(CACHE_DIR)\n\n pickle_filename = os.path.join(CACHE_DIR, 'weechat-gcal-token.pickle')\n\n # The file token.pickle stores the user's access and refresh tokens, and is\n # created automatically when the authorization flow completes for the first time.\n if os.path.exists(pickle_filename):\n with open(pickle_filename, 'rb') as token:\n creds = pickle.load(token)\n\n # If there are no (valid) credentials available, let the user log in.\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file(creds_file, SCOPES)\n creds = flow.run_local_server(port=0)\n\n # Save the credentials for the next run\n with open(pickle_filename, 'wb') as token:\n pickle.dump(creds, token)\n\n return creds", "def _authenticate_from_file(self, credentials):\n self._gauth.LoadCredentialsFile(credentials)", "def from_service_account_file(cls, filename: str, *args, **kwargs):\n return Controller2Client.from_service_account_file.__func__(Controller2AsyncClient, filename, *args, **kwargs) # type: ignore", "def load_or_create_client_key(key_file):\n # this is based on txacme.endpoint.load_or_create_client_key, but doesn't\n # hardcode the 'client.key' filename\n acme_key_file = FilePath(key_file)\n if acme_key_file.exists():\n logger.info(\"Loading ACME account key from '%s'\", acme_key_file)\n key = serialization.load_pem_private_key(\n acme_key_file.getContent(), password=None, backend=default_backend()\n )\n else:\n logger.info(\"Saving new ACME account key to '%s'\", acme_key_file)\n key = generate_private_key(\"rsa\")\n acme_key_file.setContent(\n key.private_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PrivateFormat.TraditionalOpenSSL,\n encryption_algorithm=serialization.NoEncryption(),\n )\n )\n return JWKRSA(key=key)", "def get_credentials_from_file(credentials_file):\n # Change the scope username and password variables to global\n global username\n global password\n try:\n # Open and reads the credentials.pwd file and save the lines in the username and password\n with open(os.path.dirname(__file__) + credentials_file) as credential_file:\n credentials = credential_file.readlines()\n username = credentials[0].strip()\n password = credentials[1].strip()\n\n credential_file.close()\n except FileNotFoundError as error:\n print(error)\n sys.exit(1)", "def load_credential_file(self, path):\r\n c_data = StringIO.StringIO()\r\n c_data.write(\"[Credentials]\\n\")\r\n for line in open(path, \"r\").readlines():\r\n c_data.write(line.replace(\"AWSAccessKeyId\", \"aws_access_key_id\").replace(\"AWSSecretKey\", \"aws_secret_access_key\"))\r\n c_data.seek(0)\r\n self.readfp(c_data)", "def read_key(self, keyfile_name):\n\n with open(keyfile_name, 'rb') as f:\n self.key = f.read()\n self.cryptor = Fernet(self.key)", "def from_service_account_file(cls, filename: str, *args, **kwargs):\n return WebSecurityScannerClient.from_service_account_file.__func__(WebSecurityScannerAsyncClient, filename, *args, **kwargs) # type: ignore", "def get_creds_file(self):\n filename = self.filename\n\n home = str(Path.home())\n filepath = home + os.sep + filename\n self.path = filepath\n if not 
os.path.isfile(filepath):\n return False\n\n j = json.load(open(filepath))\n self.keys = j\n return j", "def from_service_account_file(cls, filename: str, *args, **kwargs):\n return ServiceControllerClient.from_service_account_file.__func__(ServiceControllerAsyncClient, filename, *args, **kwargs) # type: ignore", "def extract_credentials(path):\n if not os.path.isfile(path):\n raise IOError(None, \"Credential file was not found at %s\" % path)\n\n if os.name == 'posix':\n mode = os.stat(path)[stat.ST_MODE]\n\n if stat.S_IRWXG & mode or stat.S_IRWXO & mode:\n raise IOError(None, \"Credential file cannot be accessible by group or other. Please chmod 600 the credential file.\")\n\n access_key, secret_key = '', ''\n with file(path, 'r') as f:\n for line in (line.strip() for line in f):\n if line.startswith(\"AWSAccessKeyId=\"):\n access_key = line.partition('=')[2]\n elif line.startswith(\"AWSSecretKey=\"):\n secret_key = line.partition('=')[2]\n\n if not access_key or not secret_key:\n raise IOError(None, \"Credential file must contain the keys 'AWSAccessKeyId' and 'AWSSecretKey'\")\n\n return Credentials(access_key, secret_key)", "def load_credentials(cred=\"credentials_prod.json\"):\n if isinstance(cred, dict):\n # Easy way to handle if a function was handed valid credentials\n pass\n elif isinstance(cred, str):\n with open(cred, 'r') as f:\n cred = json.load(f)\n else:\n raise ValueError(\"Invalid input cred={0}\".format(cred))\n\n # Check for correct entries\n cred_keys = [ \"access_token\", \"expires_in\", \"refresh_token\", \"scope\", \"token_type\"]\n for k in cred_keys:\n if k not in cred:\n raise ValueError(\"Credentials missing key {0}\".format(k))\n return cred", "def main(key_file: Optional[str]) -> None:\n # Generate a new 256-bit private key if no key is specified.\n if not key_file:\n customer_key_bytes = os.urandom(32)\n else:\n with open(key_file, \"rb\") as f:\n customer_key_bytes = f.read()\n\n google_public_key = get_google_public_cert_key()\n wrapped_rsa_key = wrap_rsa_key(google_public_key, customer_key_bytes)\n\n b64_key = base64.b64encode(customer_key_bytes).decode(\"utf-8\")\n\n print(f\"Base-64 encoded private key: {b64_key}\")\n print(f\"Wrapped RSA key: {wrapped_rsa_key.decode('utf-8')}\")", "def from_service_account_file(cls, filename: str, *args, **kwargs):\n return Debugger2Client.from_service_account_file.__func__(Debugger2AsyncClient, filename, *args, **kwargs) # type: ignore", "def signed_creds(path):\n scope = [\n 'https://spreadsheets.google.com/feeds',\n 'https://www.googleapis.com/auth/drive'\n ]\n try:\n credentials = ServiceAccountCredentials.from_json_keyfile_name(path, scope)\n except oauth2client.client.CryptoUnavailableError: #Used to bypass error in gspread\n import pip\n import site\n from importlib import reload #`from imp import reload` < python3.3 | `` python2\n pip.main(['install', 'PyOpenSSL'])\n reload(site) #Reloads the sys.path\n credentials = ServiceAccountCredentials.from_json_keyfile_name(path, scope)\n return credentials", "def get_credentials(credentials_filename, application_name, client_secret_file, scopes):\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n credentials_filename)\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(client_secret_file, scopes)\n 
flow.user_agent = application_name\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n# else: # Needed only for compatibility with Python 2.6\n# credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials(self):\n #\n # Why is this not read from the yaml file?\n path = Path(path_expand(self.credential_file)).resolve()\n if not os.path.exists(path):\n os.makedirs(path)\n\n credentials_path = (path / 'google-drive-credentials.json').resolve()\n print(credentials_path)\n\n store = Storage(credentials_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(self.client_secret_file,\n self.scopes)\n flow.user_agent = self.application_name\n #\n # SHOUDL THE FLAGS NOT BE SET IN THE YAML FILE OR DOCOPTS OFTHE COMMAND?\n #\n if self.flags:\n credentials = tools.run_flow(flow, store, self.flags)\n\n return credentials", "def _load_credentials(self, path):\r\n with open(path) as file:\r\n raw_credentials = json.load(file)[\"credentials\"]\r\n self._credentials.clear()\r\n for r in raw_credentials: # each app\r\n self._credentials.append(Credentials(\r\n consumer_key=r[\"consumerKey\"],\r\n consumer_secret=r[\"consumerSecret\"],\r\n access_token=r[\"accessToken\"],\r\n access_secret=r[\"accessSecret\"]))", "def _get_credentials(self, client_secret_file, scopes):\n\n # Check cred file exists.\n if not os.path.exists(client_secret_file):\n Console.error(\n f\"Credential file {client_secret_file} does not exists. Check the path and try again.\")\n return None\n\n # Authenticate using service account.\n _credentials = service_account.Credentials.from_service_account_file(\n filename=client_secret_file,\n scopes=scopes)\n return _credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'clockwise.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatability with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def _load_key(self, path):\n with open(path, 'r') as f:\n self._key = f.readline().strip()\n self._secret = f.readline().strip()", "def read_key():\n path = os.path.join(os.path.dirname(__file__), 'data')\n f = open(os.path.join(path, 'credential.txt'), 'r')\n key = f.read()\n f.close()\n return key", "def from_service_account_file(cls, filename: str, *args, **kwargs):\n return PipelineServiceClient.from_service_account_file.__func__(PipelineServiceAsyncClient, filename, *args, **kwargs) # type: ignore", "def _GetCredentialsIter(self, credentials_file=None):\n if not credentials_file:\n credentials_file = os.path.join(os.path.dirname(__file__),\n 'credentials.txt')\n if os.path.exists(credentials_file):\n with open(credentials_file) as f:\n for credentials in f:\n username, password = credentials.strip().split(':')\n yield username, password", "def import_credentials(password, cred_file):\n\t\tself.exchanges = decrypt(password, cred_file)", "def read_new_credential(csv_file=None):\n options = {}\n if 
csv_file is None:\n logger.info(\"Generating configuration with user-specified username + password\")\n username = input(\"Username: \")\n if len(username) == 0:\n raise RuntimeError(\"Username may not be empty\")\n password = getpass.getpass()\n if len(password) == 0:\n raise RuntimeError(\"Password may not be empty\")\n hostname = _validate_hostname(input(\"Hostname (may be empty): \"))\n token_endpoint = input(\"Token endpoint (empty if not applicable): \") or None\n else:\n if os.path.exists(csv_file):\n with open(csv_file, \"r\") as f:\n reader = csv.DictReader(f)\n cred = next(reader)\n username = cred[\"username\"]\n password = cred[\"password\"]\n hostname = cred[\"hostname\"] if \"hostname\" in cred else \"\"\n token_endpoint = cred.get(\"token_endpoint\")\n if \"mechanism\" in cred:\n options[\"method\"] = cred[\"mechanism\"].replace(\"-\", \"_\")\n if \"protocol\" in cred:\n options[\"ssl\"] = cred[\"protocol\"] != \"SASL_PLAINTEXT\"\n if \"ssl_ca_location\" in cred:\n options[\"ssl_ca_location\"] = cred[\"ssl_ca_location\"]\n else:\n raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), csv_file)\n return Auth(username, password, hostname, token_endpoint=token_endpoint, **options)", "def get_access_key(self, keyfile):\n my_key = AccessKey.create_key_from_file(keyfile)\n my_key.store_keys()\n return my_key.key", "def __init__(self, key=None):\n\n self.key = key\n self.cryptor = None\n self.file_ext_targets = ['txt']", "def LoadCredentials(json_credentials_path=None, scope_url=None):\n json_credentials_path = FindCredentialsFile(json_credentials_path)\n\n # This is the way to support both service account credentials (JSON generated\n # from Pantheon) or authenticated users (similar to `gcloud auth login`).\n google_creds = oauth2client.client.GoogleCredentials.from_stream(\n json_credentials_path)\n\n if scope_url is None:\n scope_url = DEFAULT_SCOPE_URL\n\n # We need to rescope the credentials which are currently unscoped.\n scoped_creds = google_creds.create_scoped(scope_url)\n return scoped_creds", "def read_aws_credentials(filename='.aws_credentials.json'):\n\n try:\n with open(filename) as json_data:\n credentials = json.load(json_data)\n\n for variable in ('access_key_id', 'secret_access_key', 'region'):\n if variable not in credentials.keys():\n msg = '\"{}\" cannot be found in {}'.format(variable, filename)\n raise KeyError(msg)\n \n except FileNotFoundError:\n try:\n credentials = {\n 'access_key_id': os.environ['AWS_ACCESS_KEY_ID'],\n 'secret_access_key': os.environ['AWS_SECRET_ACCESS_KEY'],\n 'region': os.environ['AWS_REGION']\n }\n except KeyError:\n msg = 'no AWS credentials found in file or environment variables'\n raise RuntimeError(msg)\n\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'reseller-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def __init__(self, creds_file):\n self.creds_file = 
creds_file\n self.service = None\n self.creds = None\n self.courses = None\n self.scopes = None\n self.client_id = None\n self.client_secret = None\n self.hostname = None", "def get_credentials():\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir,\r\n 'bis-python-quickstart.json')\r\n\r\n store = oauth2client.file.Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\r\n flow.user_agent = APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials", "def import_key(self, filename):\n fields = self.input_file(filename)\n\n if (\"Description\" not in fields or \"Method\" not in fields or\n \"Key length\" not in fields or \n \"Secret key\" not in fields or\n fields[\"Method\"] != \"AES\"):\n raise Exception(\"Error reading AES key file.\")\n # print (fields)\n key = fields['Secret key']\n key = binascii.unhexlify(key)\n key_len = int(fields[\"Key length\"], 16)\n if len(key) != key_len:\n raise Exception(\"AES key file contains false information.\")\n \n return key", "def get_credentials():\n credential_dir = os.path.realpath('.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path) # stores the users credentials --> TODO: put in database\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n\n credentials = tools.run_flow(flow, store, flags)\n\n print('Storing credentials to ' + credential_path)\n return credentials", "def _get_credentials(self):\n\n scopes = 'https://www.googleapis.com/auth/drive'\n client_secret_file = '%s/config/client_secret.json' % PROJECT_DIR\n application_name = 'Drive API Quickstart'\n\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n\n credential_path = os.path.join(credential_dir, 'drive-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(client_secret_file, scopes)\n flow.user_agent = application_name\n\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatability with Python 2.6\n credentials = tools.run(flow, store)\n\n return credentials", "def from_file(self, filename):\n return EGStub.from_file(filename).to_cryptosystem()", "def test_init_json(self, mock_creds):\n pk = \"pk\"\n email = \"email\"\n file_data = '{\"private_key\": \"%s\", \"client_email\": \"%s\"}' % (pk, email)\n\n file_mock = mock.mock_open(read_data=file_data)\n with mock.patch.object(moves.builtins, 'open', file_mock):\n credentials.Credentials('key.json')\n mock_creds.assert_called_once_with(email, pk, mock.ANY)", "def get(self):\n self._lock.acquire()\n try:\n f = open(self._filename, 'r')\n credentials = 
pickle.loads(f.read())\n f.close()\n credentials.set_store(self.put)\n except:\n credentials = None\n self._lock.release()\n\n return credentials", "def load(cls, path, password=None):\n with open(path) as f:\n keystore = json.load(f)\n if not keys.check_keystore_json(keystore):\n raise ValueError('Invalid keystore file')\n return Account(keystore, password, path=path)", "def get_credentials(self, **kwargs):\n creds_file = os.path.join(kwargs['user_dir'], 'credentials.json')\n\n # Getting credentials from Storage\n store = file.Storage(creds_file)\n creds = store.get()\n\n # Validating or refreshing credentials, if necessary\n if creds is None or creds.invalid:\n flow = client.flow_from_clientsecrets(self.client_secret_file,\n self.scopes)\n creds = tools.run_flow(flow, store)\n elif creds.access_token_expired:\n creds.refresh(httplib2.Http())\n else:\n pass\n\n return creds", "def load_key():\n return open(\"Secret.key\",\"rb\").read()", "def get_credentials():\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir,\r\n 'calendar-python-quickstart.json')\r\n\r\n store = oauth2client.file.Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\r\n flow.user_agent = APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials", "def test_create_keypair_from_file(self):\n keys = RSA.generate(1024)\n nova_utils.save_keys_to_files(keys=keys, pub_file_path=pub_file_path)\n self.keypair_creator = create_keypairs.OpenStackKeypair(self.os_creds,\n create_keypairs.KeypairSettings(name=keypair_name,\n public_filepath=pub_file_path))\n self.keypair_creator.create()\n\n keypair = nova_utils.keypair_exists(self.keypair_creator.nova, self.keypair_creator.keypair)\n self.assertEquals(self.keypair_creator.keypair, keypair)\n\n file_key = open(os.path.expanduser(pub_file_path)).read()\n self.assertEquals(self.keypair_creator.keypair.public_key, file_key)", "def FromJson(json_value):\n json_key = json.loads(json_value)\n cred_type = CredentialType.FromTypeKey(json_key['type'])\n if cred_type == CredentialType.SERVICE_ACCOUNT:\n cred = service_account.ServiceAccountCredentials.from_json_keyfile_dict(\n json_key, scopes=config.CLOUDSDK_SCOPES)\n cred.user_agent = cred._user_agent = config.CLOUDSDK_USER_AGENT\n elif cred_type == CredentialType.USER_ACCOUNT:\n cred = client.OAuth2Credentials(\n access_token=None,\n client_id=json_key['client_id'],\n client_secret=json_key['client_secret'],\n refresh_token=json_key['refresh_token'],\n token_expiry=None,\n token_uri=json_key.get('token_uri'),\n user_agent=json_key.get('user_agent'),\n revoke_uri=json_key.get('revoke_uri'),\n id_token=json_key.get('id_token'),\n token_response=json_key.get('token_response'),\n scopes=json_key.get('scopes'),\n token_info_uri=json_key.get('token_info_uri'),\n rapt_token=json_key.get('rapt_token'),\n )\n elif cred_type == CredentialType.P12_SERVICE_ACCOUNT:\n # pylint: disable=protected-access\n cred = service_account.ServiceAccountCredentials._from_p12_keyfile_contents(\n service_account_email=json_key['client_email'],\n 
private_key_pkcs12=base64.b64decode(json_key['private_key']),\n private_key_password=json_key['password'],\n scopes=config.CLOUDSDK_SCOPES)\n cred.user_agent = cred._user_agent = config.CLOUDSDK_USER_AGENT\n else:\n raise UnknownCredentialsType(json_key['type'])\n return cred", "def __init__(self, cred_file, yaml_key):\n self.premium_search_args = load_credentials(cred_file,\n yaml_key=yaml_key,\n env_overwrite=False)", "def _get_credential(self):\n creds = None\n\n if os.path.exists('token.pickle'):\n with open('token.pickle', 'rb') as token:\n creds = pickle.load(token)\n\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file(\n 'credentials.json', self.config['SCOPES'])\n creds = flow.run_local_server(port=0)\n # Save the credentials for the next run\n with open('token.pickle', 'wb') as token:\n pickle.dump(creds, token)\n\n self.service = build('drive', 'v3', credentials=creds)", "def get_credentials(args, my_dirname):\n\n credential_dir = os.path.join(my_dirname, '.credentials')\n if not os.path.exists(credential_dir):\n os.mkdir(credential_dir)\n credential_path = os.path.join(credential_dir, 'sheets.googleapis.com-cotus-checker.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n try:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store, args)\n print('Storing credentials to ' + credential_path)\n except (oauth2client.clientsecrets.InvalidClientSecretsError, json.decoder.JSONDecodeError):\n pass\n return credentials", "def get_credentials(self):\n home_dir = os.path.expanduser(\"~\")\n credential_dir = os.path.join(home_dir, \".credentials\")\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, \"autoto.json\")\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store, self.auth_flags)\n print(\"Storing credentials to \" + credential_path)\n return credentials", "def from_service_account_file(cls, filename: str, *args, **kwargs):\n return TraceServiceClient.from_service_account_file.__func__(TraceServiceAsyncClient, filename, *args, **kwargs) # type: ignore", "def __init__(\n self,\n creds=None,\n credential_path=\"\",\n credential_scopes=[\"https://www.googleapis.com/auth/drive\"],\n token_prefix=\"GoogleDrive_\",\n token_suffix=\"\",\n ):\n if creds is not None and self.credential_validation(creds):\n self.creds = creds\n else:\n self.creds = self.credential(\n credential_path, credential_scopes, token_prefix, token_suffix\n )", "def get_credential_storage(filename, client_id, user_agent, scope,\n warn_on_readonly=True):\n # Recreate the legacy key with these specific parameters\n key = {'clientId': client_id, 'userAgent': user_agent,\n 'scope': util.scopes_to_string(scope)}\n return get_credential_storage_custom_key(\n filename, key, warn_on_readonly=warn_on_readonly)", "def make_instance(self, include_optional):\n # model = openapi_client.models.cred_credential.CredCredential() # noqa: E501\n if include_optional :\n return CredCredential(\n id = '', \n account_guid = '', \n account_id = '', \n api_token = 
openapi_client.models.common/secret.common.Secret(\n encrypted = '', \n plain = '', ), \n ca_cert = '', \n description = '', \n external = True, \n last_modified = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), \n owner = '', \n role_arn = '', \n secret = openapi_client.models.common/secret.common.Secret(\n encrypted = '', \n plain = '', ), \n tokens = openapi_client.models.cred/temporary_token.cred.TemporaryToken(\n aws_access_key_id = '', \n aws_secret_access_key = openapi_client.models.common/secret.common.Secret(\n encrypted = '', \n plain = '', ), \n duration = 56, \n expiration_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), \n token = openapi_client.models.common/secret.common.Secret(\n encrypted = '', \n plain = '', ), ), \n type = '[\\\"aws\\\",\\\"azure\\\",\\\"gcp\\\",\\\"ibmCloud\\\",\\\"apiToken\\\",\\\"githubToken\\\",\\\"basic\\\",\\\"dtr\\\",\\\"kubeconfig\\\",\\\"certificate\\\"]', \n use_aws_role = True\n )\n else :\n return CredCredential(\n )", "def from_service_account_file(cls, filename: str, *args, **kwargs):\n return MetadataServiceClient.from_service_account_file.__func__(MetadataServiceAsyncClient, filename, *args, **kwargs) # type: ignore", "def get_credential_storage_custom_key(filename, key_dict,\n warn_on_readonly=True):\n multistore = _get_multistore(filename, warn_on_readonly=warn_on_readonly)\n key = util.dict_to_tuple_key(key_dict)\n return multistore._get_storage(key)", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'credentialv_modify.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n credentials_path = os.path.join(CREDENTIALS_DIR, CREDENTIALS_FILE)\n store = oauth2client.file.Storage(credentials_path)\n credentials = store.locked_get()\n\n if not credentials or credentials.invalid:\n client_secret_path = os.path.join(CREDENTIAL_DIR, CLIENT_SECRET_FILE)\n flow = client.flow_from_clientsecrets(client_secret_path, \n scope='https://www.googleapis.com/auth/admin.directory.resource.calendar',\n redirect_uri='urn:ietf:wg:oauth:2.0:oob')\n\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n\n print(\"Storing credentials to: \" + credentials_path)\n\n\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility 
with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials(account):\n credential_dir = os.path.join(HOME_DIR, META_DIR, account, \"credentials\")\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, 'pyDrive.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n 
flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def _get_credentials(self):\n if self.config_file:\n with open(self.config_file) as f:\n config_str = f.read()\n credentials_dict = json.loads(config_str)\n self.credentials = credentials_dict[self.account][self.auth_type]\n else:\n self.credentials = {\n \"account\": os.environ.get('SNOWSQL_ACCOUNT'),\n \"user\": os.environ.get('SNOWSQL_USER'),\n \"password\": os.environ.get('SNOWSQL_PWD')\n }", "def create_credentials():\r\n creds = None\r\n # The file token.pickle stores the user's access and refresh tokens, and is\r\n # created automatically when the authorization flow completes for the first\r\n # time.\r\n if os.path.exists('token.pickle'):\r\n with open('token.pickle', 'rb') as token:\r\n creds = pickle.load(token)\r\n # If there are no (valid) credentials available, let the user log in.\r\n if not creds or not creds.valid:\r\n if creds and creds.expired and creds.refresh_token:\r\n creds.refresh(Request())\r\n else:\r\n flow = InstalledAppFlow.from_client_secrets_file(\r\n 'client_secret.json', SCOPES)\r\n creds = flow.run_local_server()\r\n # Save the credentials for the next run\r\n with open('token.pickle', 'wb') as token:\r\n pickle.dump(creds, token)\r\n return creds", "def get_credentials(self):\n try:\n with open(self.credentials_file, 'r') as fh_credentials:\n credentials_dict = json.loads(fh_credentials.read())\n return credentials_dict\n except IOError:\n self.reset_credentials()\n with open(self.credentials_file, 'r') as fh_credentials:\n return json.loads(fh_credentials.read())", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'admin-directory_v1-NestedGroupSync.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatability with Python 2.6\n credentials = tools.run(flow, store)\n print 'Storing credentials to' + credential_path\n return credentials", "def get_credentials(loginfile=None, scope='prod'):\n scope, api_url = scope2props(scope)\n\n if loginfile is None:\n raise NotImplementedError(\"get_credentials not yet implemented for user-input login/password/key/secret\")\n elif isinstance(loginfile, str):\n with open(loginfile, 'r') as f:\n loginfile = json.load(f)\n elif isinstance(loginfile, dict):\n # Interact with it the same as the json file below\n pass\n else:\n raise ValueError(\"Invalid value for loginfile: \" + scope )\n c_key = loginfile['c_key']\n c_secret = loginfile['c_secret']\n username = loginfile['username']\n password = loginfile['password']\n\n # base64 takes 8-bit binary byte data, not a string object. 
Use .encode to convert this.\n # https://stackoverflow.com/questions/8908287/base64-encoding-in-python-3\n token_unencoded = c_key + \":\" + c_secret\n token = base64.b64encode(token_unencoded.encode('utf-8'))\n\n print(\"Submitting request with token (unencoded): {0}\".format(token_unencoded))\n print(\"Submitting request with token (encoded) : {0}\".format(token))\n\n # Login Request via POST to API\n # Not 100% sure what this extra decode is needed for\n headers = {'Content-Type': CONTENT_TYPE,\n 'Authorization': 'Basic ' + token.decode('utf-8')}\n data = {\n 'grant_type': 'password',\n 'username': username,\n 'password': password,\n 'scope': scope\n }\n\n r = requests.post(api_url, headers=headers, data=data)\n\n if r.status_code != 200:\n raise Exception(\"API returned error code: {0}\".format(r.status_code))\n\n # Return full credential info in dictionary json format\n return r.json()", "def load_creds(self):\n home = expanduser(\"~\")\n with open(os.path.join(home, 'creds.json')) as creds_file:\n self.creds_data = json.load(creds_file)", "def get_credentials(data_dir_path, client_secret_file_path, scopes, application_name):\n #home_dir = os.path.expanduser('~')\n #credential_dir = os.path.join(home_dir, '.credentials')\n credential_dir = os.path.join(data_dir_path, \".credentials\")\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'gmail-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(client_secret_file_path, scopes)\n flow.user_agent = application_name\n\n try:\n import argparse\n flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()\n except ImportError:\n flags = None\n\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def __init__(self, file_name, key):\n try:\n self._file_name = file_name\n self._encryptor = AES(key.encode())\n self._document = open(self._file_name, \"rb+\")\n except Exception as error:\n print(error)\n sys.exit(1)", "def load(cred_path: Path):\n logger.debug(f\"Retrieving credentials from {cred_path}\")\n if not cred_path.exists():\n raise Exception(f\"Authentication file {cred_path} does not exist.\")\n\n with open(cred_path, mode=\"r\", encoding=\"utf-8\") as cred:\n content = yaml.full_load(cred)\n return AzureCredentials(\n endpoint=content.get(\"endpoint\"), key=content.get(\"key\"),\n )", "def file_auth(self, token):\n if not os.path.isfile(token):\n raise FileNotFoundError(token + \" not found.\")\n\n store = file.Storage(token)\n creds = store.get()\n if creds.invalid:\n raise GclassCredInvalidError(\"File given is invalid!\")\n else:\n self.creds = creds\n self.build()", "def getKey(filename):\n try:\n fh = open(filename, 'rb')\n except IOError:\n logging.debug(\"getKey(): Creating new secret key.\")\n key = OpenSSL.rand.bytes(32)\n writeKeyToFile(key, filename)\n else:\n logging.debug(\"getKey(): Secret key file found. 
Loading...\")\n key = fh.read()\n fh.close()\n return key", "def get_credentials():\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir,'drive-python-quickstart.json')\r\n\r\n store = Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\r\n flow.user_agent = APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials", "def get_credentials(self):\n credential_path = 'annette/data/gmail-credentials.json'\n\n store = Storage(credential_path)\n credentials = store.get()\n flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()\n\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(self.client_secret_file, self.scopes)\n flow.user_agent = self.application_name\n credentials = tools.run_flow(flow, store, flags)\n _utils.logger.debug('Storing credentials to ' + credential_path)\n\n http = credentials.authorize(httplib2.Http())\n service = discovery.build('gmail', 'v1', http=http, cache_discovery=False)\n return service", "def create( cls, user_id = None, private_keyfile_path = None ) :\n user_id = user_id or config.USER_ID()\n private_keyfile_path = private_keyfile_path or PRIVATE_RSA_KEYFILE_PATH()\n with open( private_keyfile_path, \"r\" ) as stream :\n private_key = rsa.PrivateKey.load_pkcs1( stream.read() )\n return cls( user_id, private_key )", "def initialize_drive():\n credentials_drive = ServiceAccountCredentials.from_json_keyfile_name(\n KEY_FILE_LOCATION, \n SCOPE\n )\n return gspread.authorize(credentials_drive)", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'calendar-python-quickstart.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(config['client secret file'], SCOPES)\n flow.user_agent = APPLICATION_NAME\n if args:\n credentials = tools.run_flow(flow, store, args)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'sheets.googleapis.com-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = 
os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 'sheets.googleapis.com-python-quickstart.json')\n\n store = oauth2client.file.Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def get_credentials(provider, filename):\n\n import configparser\n from getpass import getpass\n cp = configparser.ConfigParser()\n cp.read(filename)\n provider = 'switch'\n return (cp.get(provider, 'project') + ':' + cp.get(provider, 'username'), getpass(), cp.get(provider, 'region'),\n cp.get(provider, 'keypair'), cp.get(provider, 'secgrp'))", "def from_env_file(cls, filename: str):\n # Import standard modules\n from dotenv import load_dotenv\n from os import getenv, path\n\n # Validate `path`\n if not path.isfile(filename):\n err = f\"No such file or directory: '{filename}'\"\n raise FileNotFoundError(err)\n\n load_dotenv(filename)\n\n key = getenv('API_KEY')\n secret = getenv('API_SECRET')\n url = getenv('API_URL')\n\n if not all([key, secret]):\n err = (\n '`API_KEY` and `API_SECRET` are mandatory attributes.\\n'\n 'Please make sure they are contained in your `.env` file'\n )\n raise KeyError(err)\n\n return cls(key, secret, url)", "def get_credentials(self):\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir, self.CRED_FILENAME)\r\n \r\n store = Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(self.CLIENT_SECRET_FILE, self.SCOPES)\r\n flow.user_agent = self.APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials", "def load_credentials(secrets: Secrets = None): # noqa: E501\n secrets = secrets or {}\n service_account_file = secrets.get(\"service_account_file\")\n service_account_info = secrets.get(\"service_account_info\")\n\n if not service_account_file:\n google_app_creds = os.getenv(\n \"GOOGLE_APPLICATION_CREDENTIALS\",\n os.getenv(\"GCP_APPLICATION_CREDENTIALS\"),\n )\n if google_app_creds:\n service_account_file = google_app_creds\n\n credentials = None\n if service_account_file:\n service_account_file = os.path.expanduser(service_account_file)\n if not os.path.exists(service_account_file):\n raise FailedActivity(\n \"GCP account settings not found at {}\".format(\n service_account_file\n )\n )\n\n logger.debug(\n \"Using GCP credentials from file: {}\".format(service_account_file)\n )\n credentials = Credentials.from_service_account_file(\n service_account_file\n )\n elif service_account_info and isinstance(service_account_info, dict):\n logger.debug(\"Using GCP credentials embedded into secrets\")\n credentials = Credentials.from_service_account_info(\n service_account_info\n )\n else:\n raise FailedActivity(\n \"missing GCP credentials settings in 
secrets of this activity\"\n )\n\n if credentials is not None and credentials.expired:\n logger.debug(\"GCP credentials need to be refreshed as they expired\")\n credentials.refresh(httplib2.Http())\n\n if not credentials:\n raise FailedActivity(\n \"missing a service account to authenticate with the \"\n \"Google Cloud Platform\"\n )\n\n return credentials", "def load_key():\n return open(\"secret.key\", \"rb\").read()", "def get_credentials() -> client.Credentials:\n\n credential_path = os.path.join(HOME_DIR, \"google-credentials.json\")\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(os.path.join(HOME_DIR, CLIENT_SECRET_FILE), SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n # This attempts to open an authorization page in the default web browser, and asks the user\n # to grant the bot access to their data. If the user grants permission, the run_flow()\n # function returns new credentials.\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print(\"Storing credentials to \" + credential_path)", "def get_credentials_from_file(creds_type) -> str:\n with open(\n f\"{TEST_DATA}/auth-basic-auth-mergeable/credentials/auth-basic-auth-{creds_type}-credentials.txt\"\n ) as credentials_file:\n return credentials_file.read().replace(\"\\n\", \"\")", "def _parse_creds(filename='.divvy'):\n\n creds = None\n\n try:\n file_path = os.path.expanduser('~') + '/' + filename\n with open(file_path, 'r') as credfile:\n for line in credfile:\n if line.strip()[0] == '#':\n pass\n elif ':' in line:\n username = line.strip().split(':')[0]\n password = line.strip().split(':')[1]\n creds = username, password\n break\n return creds\n\n # Fail silently as most people will not have creds file\n except IOError:\n return None\n\n except (UnboundLocalError, IndexError):\n print('Attempted to use a credentials dotfile ({}) but '\n 'it is either empty or malformed. Credentials should be in '\n 'the form <USERNAME>:<API_TOKEN>.'.format(file_path))\n raise", "def read_in_xforce_keys(file):\n key = file.readline().strip()\n password = file.readline().strip()\n if validate_api_creds(key) and validate_api_creds(password):\n return key, password\n else:\n print(\"API credentials invalid. Please check your key and password. Exiting...\")\n sys.exit(1)" ]
[ "0.7308526", "0.70017743", "0.65263546", "0.65192187", "0.64679414", "0.64679414", "0.64656276", "0.64656276", "0.64656276", "0.6400258", "0.6367309", "0.6249415", "0.6246309", "0.6221481", "0.621487", "0.621381", "0.6206755", "0.6169671", "0.61067", "0.6066779", "0.6063209", "0.6049124", "0.604187", "0.6032928", "0.60088533", "0.5992752", "0.5915883", "0.5914331", "0.59118724", "0.5898584", "0.58980155", "0.58763355", "0.5875221", "0.5847067", "0.5833816", "0.5806981", "0.57896906", "0.5783417", "0.5778675", "0.577444", "0.57654417", "0.5762288", "0.5751356", "0.5742989", "0.5740604", "0.57372767", "0.57247406", "0.5722625", "0.57172316", "0.57061386", "0.5688408", "0.5684476", "0.5670413", "0.5653966", "0.56491685", "0.5638404", "0.5632659", "0.56299025", "0.5624573", "0.5612819", "0.56106806", "0.5609901", "0.56086874", "0.56073487", "0.56071246", "0.56060326", "0.560033", "0.55980116", "0.55980116", "0.55980116", "0.55980116", "0.55980116", "0.5597754", "0.5592466", "0.55891967", "0.5588148", "0.558652", "0.5574099", "0.5574071", "0.55731446", "0.5570288", "0.5563203", "0.55625314", "0.55588627", "0.5553267", "0.554626", "0.5533678", "0.552135", "0.5518008", "0.55175316", "0.55175316", "0.55079126", "0.5505852", "0.5504219", "0.55009836", "0.54991996", "0.5497402", "0.54891866", "0.5486951", "0.5484218" ]
0.6788051
2
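
The negatives in the record above repeatedly follow the same oauth2client pattern: look up a cached credential file under `~/.credentials`, and fall back to running the interactive OAuth flow when the store is empty or invalid. A minimal, self-contained sketch of that shared pattern follows; the scope, file names, and application name are illustrative assumptions, not values taken from any single snippet.

```python
# Minimal sketch of the Storage/flow pattern repeated in the snippets above.
# All paths, scopes, and names here are illustrative assumptions.
import os

from oauth2client import client, tools
from oauth2client.file import Storage

SCOPES = 'https://www.googleapis.com/auth/drive.metadata.readonly'
CLIENT_SECRET_FILE = 'client_secret.json'   # assumed location
APPLICATION_NAME = 'Example App'            # assumed name


def get_credentials():
    """Return stored credentials, running the OAuth flow if needed."""
    credential_dir = os.path.join(os.path.expanduser('~'), '.credentials')
    if not os.path.exists(credential_dir):
        os.makedirs(credential_dir)
    credential_path = os.path.join(credential_dir, 'example.json')

    # Reuse a cached credential if one exists and is still valid.
    store = Storage(credential_path)
    credentials = store.get()
    if not credentials or credentials.invalid:
        flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
        flow.user_agent = APPLICATION_NAME
        credentials = tools.run_flow(flow, store)
    return credentials
```

The per-snippet variants differ mainly in the cached file name and in the Python 2.6 fallback branch (`tools.run`), which later oauth2client releases no longer need.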
Decode a token on AppEngine.
def decode_token_appengine(credentials, token, verify=False):\n return _decode_token(credentials, token, False)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def decode_token(token):\n decoded_token = jwt.decode(token, secret_key, algorithms=['HS256'])\n return decoded_token", "def decode(token):\n return jwt.decode(token, app.config[\"JWT_SECRET\"], algorithms=[\"HS256\"])", "def decode_token(token):\n\n return jwt.decode(\n token, settings.JWT_SECRET, algorithms=[settings.JWT_ALGO])", "def decode(encoded_token):\n return jwt.decode(encoded_token, key=settings.JWT_AUTH['JWT_SECRET_KEY'])", "def parse_token(token):\n return jwt.decode(token, app.config['JWT_SECRET'])", "def test_decode_token():\n pass", "def decode_auth_token(auth_token):\n if len(auth_token) != 139:\n return \"Invalid token. Please log in again.\"\n try:\n payload = jwt.decode(auth_token, key)\n is_blacklisted_token = BlacklistToken.check_blacklist(auth_token)\n if is_blacklisted_token:\n return 'Token blacklisted. Please log in again.'\n else:\n return payload['sub']\n except jwt.ExpiredSignatureError:\n return 'Signature expired. Please log in again.'\n except jwt.InvalidTokenError:\n return 'Invalid token. Please log in again.'", "def decode_token(token):\n payload = None\n try:\n payload = jwt.decode(token.encode('utf-8'), '1$Arh\"1bWa/7+OS', algorithm='HS256')['u_id']\n except jwt.InvalidTokenError:\n pass\n return payload", "def decode_auth_token(auth_token):\n try:\n payload = jwt.decode(auth_token, Config.SECRET_KEY,algorithms='HS256')\n return payload\n except jwt.ExpiredSignatureError:\n return 'Signature expired. Please log in again.'\n except jwt.InvalidTokenError:\n return 'Invalid token. Please log in again.'", "def decode_token(token):\n try:\n # try to decode the token using our SECRET variable\n payload = jwt.decode(token, app.config.get('SECRET_KEY'), algorithms=['HS256'])\n return payload['sub']\n except jwt.ExpiredSignatureError:\n # the token is expired, return an error string\n return \"Expired token. Please login to get a new token\"\n except jwt.InvalidTokenError:\n # the token is invalid, return an error string\n return \"Invalid token. Please register or login\"", "def decode_auth_token(auth_token):\n try:\n payload = jwt.decode(auth_token, app.config.get('SECRET_KEY'))\n\n # is_blacklisted_token = BlacklistToken.check_blacklist(auth_token)\n # if is_blacklisted_token:\n # return 'Token blacklisted. Please log in again.'\n # else:\n return payload['sub']\n except jwt.ExpiredSignatureError:\n return 'Signature expired. Please log in again.'\n except jwt.InvalidTokenError:\n return 'Invalid token. Please log in again.'", "def decode_token(token, options=JWT_OPTIONS):\n return jwt.decode(\n token,\n SECRET_KEY,\n issuer=JWT_ISSUER,\n audience=JWT_AUDIENCE,\n options=options,\n algorithms=(JWT_OPTIONS_ALGORITHM,)\n )", "def decode_token(token):\n try:\n payload = jwt.decode(\n token, app.config.get('SECRET_KEY'), algorithms='HS256')\n return payload['sub']\n except jwt.ExpiredSignatureError:\n return \"Expired token. Please login to get a new token\"\n except jwt.InvalidTokenError:\n return \"Invalid token. 
Please register or login\"", "def _decode(token):\n if token is None:\n return None\n # Pad the token out to be divisible by 4.\n padded_token = bytes(token, 'utf8') + '='.encode() * (4 - (len(token) % 4))\n decoded_token = base64.urlsafe_b64decode(padded_token)\n token_dict = json.loads(decoded_token)\n if not token_dict or not isinstance(token_dict, dict):\n raise ValueError('Invalid pagination token: {}').format(token_dict)\n return token_dict", "def decode_auth_token(secret_key, auth_token):\n try:\n payload = jwt.decode(auth_token, secret_key) \n is_blacklisted_token = BlacklistToken.check_blacklist(auth_token)\n if is_blacklisted_token:\n return 'Token blacklisted. Please log in again.' \n else: \n return payload['sub']\n except jwt.ExpiredSignatureError:\n return 'Signature expired. Please log in again.'\n except jwt.InvalidTokenError:\n return 'Invalid token. Please log in again.'", "def decode_token(token):\n try:\n # Decode token with our secret key\n payload = jwt.decode(token, SECRET_KEY)\n return payload['sub']\n except jwt.ExpiredSignatureError:\n # token has expired\n return \"Timed out. Please login to get a new token\"\n except jwt.InvalidTokenError:\n return \"Invalid token. Please register or login\"", "def decode_auth_token(auth_token): \n try: \n payload = jwt.decode(auth_token, getattr(settings, \"SECRET_KEY\", \"\"),algorithms=['HS256']) \n is_blacklisted_token = User.check_blacklist(auth_token)\n if is_blacklisted_token:\n return False,'Token blacklisted. Please log in again.'\n else:\n return True, payload['sub']\n except jwt.ExpiredSignatureError:\n return False,'Signature expired. Please log in again.'\n except jwt.InvalidTokenError:\n return False,'Invalid token. Please log in again.'", "def decode_token(token):\n try:\n # try to decode the token using our SECRET variable\n payload = jwt.decode(token, os.environ.get('SECRET', 'test'))\n return \"\", payload['sub']\n except jwt.ExpiredSignatureError:\n # the token is expired, return an error string\n return \"Expired token. Please login to get a new token\", None\n except jwt.InvalidTokenError:\n # the token is invalid, return an error string\n return \"Invalid token. Please register or login\", None", "def decode_token(token: str):\n try:\n decoded = b64decode(token.encode())\n key_data = orjson.loads(decoded)\n timestamp = int(key_data['t'])\n pub_key = key_data['p']\n signature = key_data['s']\n except (ValueError, TypeError, KeyError, orjson.JSONDecodeError, binascii.Error) as e:\n logging.debug(\"Invalid token format: %s\", token)\n raise HTTPException(status_code=403, detail=\"Invalid token\") from e\n\n if timestamp > time.time() or timestamp < time.time() - TOKEN_EXPIRE_INTERVAL:\n raise HTTPException(status_code=403, detail=\"Token expired\")\n\n try:\n check_signature(\n ''.join([pub_key, str(timestamp)]),\n signature,\n pub_key\n )\n except InvalidSignature as e:\n logging.error(\"Invalid token signature. 
Might be access violation.\")\n raise HTTPException(status_code=403, detail=\"Invalid token\") from e\n\n return pub_key", "def decodeJWT(self, token):\n try:\n return jwt.decode(token, self.secret, algorithms=[self.algorithm])\n except jwt.exceptions.InvalidSignatureError:\n raise ValueError(f'The following JWT is invalid: {token}')", "def decode_auth_token(auth_token, config):\n secret_key = config['JWT_SECRET_KEY']\n try:\n payload = jwt.decode(auth_token, secret_key)\n return payload['sub']\n except jwt.ExpiredSignatureError as error:\n raise ExpiredToken from error\n except (jwt.InvalidTokenError, jwt.DecodeError) as error:\n raise InvalidToken from error", "def decode_token(token):\n text = xlmr.decode(torch.tensor(token).long())\n return text.replace(' ', '')", "def decode_auth_token(auth_token):\n try:\n payload = jwt.decode(auth_token, Config.SECRET_KEY,algorithms='HS256')\n return payload['role']\n except jwt.ExpiredSignatureError:\n return 'Signature expired. Please log in again.'\n except jwt.InvalidTokenError:\n return 'Invalid token. Please log in again.'", "def decodeAuthToken(authToken):\n try:\n return jwt.decode(authToken, current_app.config['SECRET_KEY'], algorithm='HS256')['sub']\n except jwt.ExpiredSignatureError:\n return 'signature expired, Please login again'\n except jwt.InvalidTokenError:\n return 'Invalid token'", "def decode_auth_token(auth_token: str) -> Union[str, int]:\n try:\n payload = jwt.decode(auth_token, key, algorithms='HS256')\n \n user=Usuario.query.filter_by(id=payload['uid']).first()\n is_blacklisted_token = BlacklistToken.check_blacklist(auth_token)\n if is_blacklisted_token:\n return 'Conta deslogada. Por favor realizar o login novamente.'\n elif user.ativo!=True:\n return 'Conta invativa. Por favor entrar em contato com o administrador.'\n else:\n return payload['uid']\n except jwt.ExpiredSignatureError:\n return 'Token expirado. Por favor realizar o login novamente.'\n except jwt.InvalidTokenError:\n return 'Token inválido. Por favor realizar o login novamente.'", "def decode_auth_token(auth_token):\n try:\n payload = jwt.decode(auth_token, key)\n return payload['sub']\n except jwt.ExpiredSignatureError:\n return 'Signature expired. Please log in again.'\n except jwt.InvalidTokenError:\n return 'Invalid token. 
Please log in again.'", "def decode(self, crypto):", "def decode_token(self, token: str, max_age: int) -> Optional[object]:\n try:\n return self.serializer.loads(token, max_age)\n except (BadSignature, SignatureExpired) as e:\n return None", "def decode(encoded):\n if encoded is None:\n return None\n\n try:\n s = decode(APP.config['SECRET_KEY'], encoded)\n return json.loads(s)\n except Exception as err:\n LOGGER.error('Error decoding auth: %s' % str(err))\n raise err", "def test_decode_token_invalid(token):\n payload = User.decode_auth_token(f'{token}1337')\n assert isinstance(payload, User) is False\n assert 'Invalid token' in payload", "def decode(self, encoded):", "def test_decode_token(token):\n payload = User.decode_auth_token(token)\n user = User.find_by_id(payload.get('id'))\n assert isinstance(user, User) is True\n assert user.email == '[email protected]'", "def decode_payload(encoded_payload):\n jwt_secret = app.config['SECRET_KEY']\n payload = jwt.decode(encoded_payload, jwt_secret, algorithms='HS256')\n\n return payload", "def decode(data): #@NoSelf", "def get_token(self):\n\n try:\n return jwt.decode(self.fetch_token(), KEY, algorithms=['HS256'])\n except jwt.exceptions.DecodeError:\n raise InvalidToken", "def decode_secret(secret, encoding=SecretEncoding.BASE32):\n return _decoding_map[encoding](secret)", "def get_payload(cls, token):\n \n secret = cls.secret\n algo = cls.algo\n decoded = jwt.decode(token, secret, algo)\n return decoded", "def decode_token_service_key(credentials, token, verify=True):\n return _decode_token(credentials, token, verify)", "def decode(self, *args, **kwargs):\n return self.tokenizer.decode(*args, **kwargs)", "def decode(self, *args, **kwargs):\n return self.tokenizer.decode(*args, **kwargs)", "def decode(cookie):\n try:\n compressed = False\n payload = cookie\n\n if payload.startswith(b'.'):\n compressed = True\n payload = payload[1:]\n\n data = payload.split(\".\")[0]\n\n data = base64_decode(data)\n if compressed:\n data = zlib.decompress(data)\n\n return flask_loads(data)\n except Exception, e:\n print e\n return \"[Decoding error: are you sure this was a Flask session cookie?]\"", "def decode(self, s):", "def decode(self, s):", "def decode(\n self,\n keys: Union[KeyInterface, List[KeyInterface]],\n token: Union[bytes, str],\n implicit_assertion: Union[bytes, str] = b\"\",\n deserializer: Optional[Any] = None,\n aud: str = \"\",\n ) -> Token:\n\n if deserializer:\n try:\n if not callable(deserializer.loads):\n raise ValueError(\"deserializer should have loads().\")\n except AttributeError:\n raise ValueError(\"deserializer should have loads().\")\n except Exception:\n raise\n\n keys = keys if isinstance(keys, list) else [keys]\n bi = implicit_assertion if isinstance(implicit_assertion, bytes) else implicit_assertion.encode(\"utf-8\")\n\n failed = None\n t = Token.new(token)\n for k in keys:\n if k.header != t.header:\n continue\n try:\n if k.purpose == \"local\":\n t.payload = k.decrypt(t.payload, t.footer, bi)\n else:\n t.payload = k.verify(t.payload, t.footer, bi)\n try:\n if deserializer:\n t.payload = deserializer.loads(t.payload)\n except Exception as err:\n raise ValueError(\"Failed to deserialize the payload.\") from err\n if deserializer:\n try:\n if t.footer:\n t.footer = deserializer.loads(t.footer)\n except Exception:\n pass\n self._verify_registered_claims(t.payload, aud)\n return t\n except Exception as err:\n failed = err\n if failed:\n raise failed\n raise ValueError(\"key is not found for verifying the token.\")", "def 
decode(self, value):\r\n pass", "def decode(self, tokens: List[str]) -> str:\n return self.bpe.decode([int(token) for token in tokens])", "def decode_request(self, data):\n return decode_jwt(data[\"jwt\"], data[\"cert_name\"], self.node.node_name, self.node.id)", "def verify_token(self, token: str) -> str:\n return decode(self.rd.hget(\"auth:by_token\", token))", "def decode(self, token, verify=True):\n try:\n return jwt.decode(\n token,\n self.get_verifying_key(token),\n algorithms=[self.algorithm],\n audience=self.audience,\n issuer=self.issuer,\n leeway=self.leeway,\n options={\n 'verify_aud': self.audience is not None,\n 'verify_signature': verify,\n },\n )\n except InvalidAlgorithmError as ex:\n raise TokenBackendError(_('Invalid algorithm specified')) from ex\n except InvalidTokenError:\n raise TokenBackendError(_('Token is invalid or expired'))", "def decode(data):\n raise NotImplementedError", "def decode(self, code):\n raise NotImplementedError", "async def validate_token(self, token: bytes, audience=None) -> Dict[str, str]:\n\n try:\n header = jwt.get_unverified_header(token)\n if \"kid\" not in header:\n raise InvalidToken(\"Missing kid in header\")\n return jwt.decode(token, await self.retrieve_public_key(self._decode_public_key_identifier(header[\"kid\"])), algorithms='RS256', issuer=tedious.config.CONFIG[\"TOKEN\"][\"issuer\"], audience=audience)\n except DecodeError:\n raise InvalidToken(\"Unable to decode token.\")\n except Exception as e:\n raise InvalidToken(str(type(e)) + \" \" + str(e))", "def decode(self, data):\n return self.__cipher.decrypt(data)", "def decrypt(self, encrypted_token: bytes) -> bytes:\n return None", "def decode(self, value):\r\n return value", "def parse_token(req):\n auth_string_list = req.headers.get('Authorization').split()\n # Check in correct format i.e. 
Bearer: 39xds03lda0...\n if len(auth_string_list) == 1:\n raise ValueError('Authorization has invalid format')\n else:\n token = auth_string_list[1]\n data = jwt.decode(token, config.SECRET_KEY, algorithms='HS256')\n return data", "def test_decode_IQ_token(self):\n\n token = \"\"\"eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJuYW1laWQiOiJhZGZzfHNodzAwMXNhaW50ZWxzZXdoZXJlfGpwX2FkbWluQHVybjphdXRoMDpzYWludGVsc2V3aGVyZSIsImVtYWlsIjoiSmFpbWluLlBhdGVsODMrNTE2NDU2QGdtYWlsLmNvbSIsInVuaXF1ZV9uYW1lIjoiSVFHRU5IT1NQXFxiXy1kcHl4eDBFeVVjR0pIaG1aOCIsImh0dHBzOi8vdGVsZXRyYWNraW5nLmNsb3VkYXBwLm5ldC9pZGVudGl0eS9jbGFpbXMvYWR1c2VyZ3VpZCI6IjMveFFhZ0VrSWttcllBU0VQZHVZRmc9PSIsImh0dHBzOi8vdGVsZXRyYWNraW5nLmNsb3VkYXBwLm5ldC9pZGVudGl0eS9jbGFpbXMvZmlyc3RuYW1lIjoiQWRtaW4iLCJodHRwczovL3RlbGV0cmFja2luZy5jbG91ZGFwcC5uZXQvaWRlbnRpdHkvY2xhaW1zL2xhc3RuYW1lIjoiVGVzdCIsImh0dHBzOi8vdGVsZXRyYWNraW5nLmNsb3VkYXBwLm5ldC9pZGVudGl0eS9jbGFpbXMvb3VuYW1lIjoiU2FpbnRFbHNld2hlcmUiLCJyb2xlIjpbIkRvbWFpbiBVc2VycyIsIkFkbWluaXN0cmF0b3IiLCJJUUdlbkhvc3BTZWMiLCJTYWludEVsc2V3aGVyZSJdLCJ1cG4iOiJKYWltaW4uUGF0ZWw4Mys1MTY0NTZAZ21haWwuY29tIiwiaHR0cDovL3NjaGVtYXMuYXV0aDAuY29tL2lkZW50aXRpZXMvZGVmYXVsdC9wcm92aWRlciI6ImFkZnMiLCJodHRwOi8vc2NoZW1hcy5hdXRoMC5jb20vaWRlbnRpdGllcy9kZWZhdWx0L2Nvbm5lY3Rpb24iOiJzaHcwMDFzYWludGVsc2V3aGVyZSIsImh0dHA6Ly9zY2hlbWFzLmF1dGgwLmNvbS9pZGVudGl0aWVzL2RlZmF1bHQvaXNTb2NpYWwiOiJmYWxzZSIsImh0dHA6Ly9zY2hlbWFzLmF1dGgwLmNvbS9naXZlbl9uYW1lIjoiSVFHRU5IT1NQXFxiXy1kcHl4eDBFeVVjR0pIaG1aOCIsImh0dHA6Ly9zY2hlbWFzLmF1dGgwLmNvbS9waWN0dXJlIjoiaHR0cHM6Ly9zLmdyYXZhdGFyLmNvbS9hdmF0YXIvMzUxYTRiMjU4NWViM2UyYjA1NWI4ZTAyOGY4NzdmNDc_cz00ODBcdTAwMjZyPXBnXHUwMDI2ZD1odHRwcyUzQSUyRiUyRmNkbi5hdXRoMC5jb20lMkZhdmF0YXJzJTJGaXEucG5nIiwiaHR0cDovL3NjaGVtYXMuYXV0aDAuY29tL25pY2tuYW1lIjoiSmFpbWluLlBhdGVsODMrNTE2NDU2IiwiaHR0cDovL3NjaGVtYXMuYXV0aDAuY29tL2VtYWlsX3ZlcmlmaWVkIjoidHJ1ZSIsImh0dHA6Ly9zY2hlbWFzLmF1dGgwLmNvbS9jbGllbnRJRCI6Imtrakgxd3AzdE53RmpEN0M1djI3a0oyWHFWUHE1akhtIiwiaHR0cDovL3NjaGVtYXMuYXV0aDAuY29tL3VwZGF0ZWRfYXQiOiJNb24gSmFuIDE0IDIwMTkgMTU6NTY6MTIgR01UKzAwMDAgKFVUQykiLCJodHRwOi8vc2NoZW1hcy5hdXRoMC5jb20vY3JlYXRlZF9hdCI6IkZyaSBKYW4gMTEgMjAxOSAyMDoxNToyMiBHTVQrMDAwMCAoVVRDKSIsImF1dGhtZXRob2QiOiJodHRwOi8vc2NoZW1hcy5taWNyb3NvZnQuY29tL3dzLzIwMDgvMDYvaWRlbnRpdHkvYXV0aGVudGljYXRpb25tZXRob2QvcGFzc3dvcmQiLCJhdXRoX3RpbWUiOiIyMDE5LTAxLTE0VDIzOjU2OjEyLjg1M1oiLCJodHRwczovL3RlbGV0cmFja2luZy5jbG91ZGFwcC5uZXQvaWRlbnRpdHkvY2xhaW1zL3RlbmFudGlkIjoiMjExNmU5NDMtNTA5NC00MWY3LTgzMTgtODNhYWMyYWMxMTQ3IiwiaHR0cHM6Ly90ZWxldHJhY2tpbmcuY2xvdWRhcHAubmV0L2lkZW50aXR5L2NsYWltcy9jb250ZXh0cGVyc29uaWQiOiIwYTAxMjBhMS04NTU3LTQ4MzEtYTQyNi1hOGJkMDBmNjFkYzkiLCJodHRwczovL3RlbGV0cmFja2luZy5jbG91ZGFwcC5uZXQvaWRlbnRpdHkvY2xhaW1zL3VzZXJuYW1lZm9ybWFsIjoiVGVzdCwgQWRtaW4iLCJodHRwczovL3RlbGV0cmFja2luZy5jbG91ZGFwcC5uZXQvaWRlbnRpdHkvY2xhaW1zL3VzZXJuYW1laW5mb3JtYWwiOiJBZG1pbiBUZXN0IiwiaHR0cHM6Ly90ZWxldHJhY2tpbmcuY2xvdWRhcHAubmV0L2lkZW50aXR5L2NsYWltcy91c2VySWQiOiI0ZmU5OTdmZC00ZGNkLTQxNWItYjJjYi1hOGJkMDBmNjFkYzkiLCJodHRwczovL3RlbGV0cmFja2luZy5jbG91ZGFwcC5uZXQvaWRlbnRpdHkvY2xhaW1zL2ZlYXR1cmV0eXBlaWQiOlsiNCIsIjIiLCIxIiwiMyIsIjUiLCI2Il0sImlzcyI6InRlbGV0cmFja2luZy5jb20iLCJhdWQiOiJodHRwOi8vd3d3LnNlcnZpY2UudGVsZXRyYWNraW5nLmNvbS8iLCJleHAiOjE1NTAwNzM0MzksIm5iZiI6MTU0NzQ4MTQzOX0.UCL-Wc3OSVDI58U5ShOYqLa-DwNc_WQ3BlY5P3CfnVI\"\"\"\n audience = 'http://www.service.teletracking.com/'\n\n secret = 'drMemxWrLen6fCXQA5jO6gXkK/UoZVzPGRDiff7ByPU='\n decoded_token = AuthenticationHandler.validate_and_decode_token(\n token=token, key=secret,\n audience=audience\n )\n self.assertTrue(decoded_token['role'][0] == 'Domain Users', \"Group 1 
not match\")\n self.assertTrue(decoded_token['role'][1] == 'Administrator', \"Group 2 not match\")", "def deserialize(token):\n\n if token.type == TYPE_BOOLEAN:\n return _to_boolean(token)\n elif token.type == TYPE_INTEGER:\n return _to_int(token)\n elif token.type == TYPE_FLOAT:\n return _to_float(token)\n elif token.type == TYPE_DATE:\n return _to_date(token)\n elif token.type in (TYPE_STRING, TYPE_MULTILINE_STRING, TYPE_BARE_STRING,\n TYPE_LITERAL_STRING, TYPE_MULTILINE_LITERAL_STRING):\n return _to_string(token)\n else:\n raise Exception('This should never happen!')", "def deparse(token):\n\n pass", "def _decode_key(self, key):\n return key if not key or isinstance(key, str) else key.decode()", "def load_token(token):\n\n #The Token itself was generated by User.get_auth_token. So it is up to \n #us to known the format of the token data itself. \n\n #The Token was encrypted using itsdangerous.URLSafeTimedSerializer which \n #allows us to have a max_age on the token itself. When the cookie is stored\n #on the users computer it also has a exipry date, but could be changed by\n #the user, so this feature allows us to enforce the exipry date of the token\n #server side and not rely on the users cookie to exipre. \n #max_age = app.config[\"REMEMBER_COOKIE_DURATION\"].total_seconds()\n\n #Decrypt the Security Token, data = [username, hashpass]\n data = login_serializer.loads(token)\n\n #Find the User\n user = User.query.filter_by(email = data[0]).first()\n\n #Check Password and return user or None\n if user and data[1] == user.pwd:\n \t#On update la derniere connection du user\n \tuser.update_last_connection()\n return user\n return None\n\n\n\n\n\n\n\n #######################################################\n ################# REQUETES ############################\n #######################################################", "def token(uncapped_token):\n return uncapped_token", "def deserialize_tokens():\n\ttry:\n\t\twith open(config.TOKENPATH, \"r+\") as f:\n\t\t\tcontext = f.read()\n\t\t\tres = eval(context)\n\t\t\t# load into memory\n\t\t\treturn res[\"access_token\"], res[\"refresh_token\"]\n\texcept:\n\t\t# unexcept token format\n\t\tfrom common import ApplicationException\n\t\traise ApplicationException(\"authorization file is broken, please run init\")", "def decrypt(code):\n f = Fernet(settings.SECRET_KEY)\n return f.decrypt(code).decode('ascii')", "async def decode(self, jwt_token: str, verify=True) -> dict:\n try:\n jwt_token = jwt_token.replace(f\"{self.auth_schema} \", \"\")\n payload = jwt.decode(\n jwt_token,\n self.jwt_secret,\n algorithms=(self.jwt_algorithm,),\n options={\"verify_exp\": verify},\n )\n\n return payload\n\n except jwt.DecodeError:\n raise InvalidTokenException()\n\n except jwt.ExpiredSignatureError:\n raise TokenExpiredException()", "def decode_jwt(encoded, key, algorithms = 'HS256'):\n try:\n payload = jwt.decode(\n encoded, \n key, \n algorithms = algorithms\n )\n\n return payload\n # if token has expired:\n except jwt.exceptions.ExpiredSignatureError:\n raise JWTError(\n {\n 'code': 'token_expired',\n 'description': 'Token expired.'\n }, \n 401\n )\n # other exceptions:\n except Exception:\n raise JWTError(\n {\n 'code': 'invalid_header',\n 'description': 'Unable to parse authentication token.'\n }, \n 400\n )", "def token_to_id(self, token):\r\n return self.encoder.get(token, self.encoder.get(self.unk_token))", "def decode_string(self, value):\r\n return value", "def mostlikelydecode(self):\n\n # Add your code here\n most_likely_codeword = Cipher(None) # 
Replace None with a method\n return most_likely_codeword.decode(None) # Replace None. What does decode take again in the Cipher class? ", "def de_en_coder(key,token,state=True):\n\tresult = list()\n\tkey , token_lower = key.lower().rstrip() , token.lower()\n\tkey_limit = len(key) - 1\n\tkey_ptr = 0 \n\tfor i,t in enumerate(token_lower) :\n\t\tif t not in dictionary :\n\t\t\tresult.append(t)\n\t\t\tcontinue\n\t\tt_index = dictionary.index(t)\n\t\tk_index = dictionary.index(key[key_ptr])\n\t\tr_index = ( (state) and (t_index+k_index) or (t_index-k_index) ) %26\n\t\tif token[i].isupper():\n\t\t\tresult.append(dictionary[r_index].upper())\n\t\telse:\n\t\t\tresult.append(dictionary[r_index])\n\t\tkey_ptr += (key_ptr==key_limit) and -key_ptr or 1\n\treturn \"\".join(result)", "def decoder(self):\n pass", "def load_token(token):\n \n #The Token itself was generated by User.get_auth_token. So it is up to \n #us to known the format of the token data itself. \n \n #The Token was encrypted using itsdangerous.URLSafeTimedSerializer which \n #allows us to have a max_age on the token itself. When the cookie is stored\n #on the users computer it also has a exipry date, but could be changed by\n #the user, so this feature allows us to enforce the exipry date of the token\n #server side and not rely on the users cookie to exipre. \n max_age = REMEMBER_COOKIE_DURATION.total_seconds()\n \n #Decrypt the Security Token, data = [username, hashpass]\n data = login_serializer.loads(token, max_age=max_age)\n \n #Find the User\n user = load_user(data[0])\n \n #Check Password and return user or None\n if user and data[1] == user.password:\n return user\n return None", "def decode(self, data: bytes) -> bytes:\n ...", "def _verified_token(self,encoded_token: bytes) -> Dict[str,Union[str,int,bool]]:\n try:\n return jwt.decode(encoded_token,self._SECRET_KEY,algorithms=self._ALGORITHM)\n except jwt.ExpiredSignatureError as err:\n raise HTTPException(status_code=422,detail=str(err))\n except jwt.DecodeError as err:\n raise HTTPException(status_code=422,detail=str(err))\n except jwt.InvalidAlgorithmError as err:\n raise HTTPException(status_code=422,detail=str(err))\n except jwt.InvalidKeyError as err:\n raise HTTPException(status_code=422,detail=str(err))\n except jwt.InvalidTokenError as err:\n raise HTTPException(status_code=422,detail=str(err))\n except jwt.InvalidIssuerError as err:\n raise HTTPException(status_code=422,detail=str(err))\n except jwt.InvalidAudienceError as err:\n raise HTTPException(status_code=422,detail=str(err))\n except jwt.InvalidIssuedAtError as err:\n raise HTTPException(status_code=422,detail=str(err))\n except jwt.InvalidSignatureError as err:\n raise HTTPException(status_code=422,detail=str(err))\n except jwt.ImmatureSignatureError as err:\n raise HTTPException(status_code=422,detail=str(err))\n except jwt.MissingRequiredClaimError as err:\n raise HTTPException(status_code=422,detail=str(err))", "def decode_expiry_value(byte_iter):\n value_length = MMSDecoder.decode_value_length(byte_iter)\n token = byte_iter.next()\n\n if token == 0x80: # Absolute-token\n return MMSDecoder.decode_date_value(byte_iter)\n elif token == 0x81: # Relative-token\n return MMSDecoder.decode_delta_seconds_value(byte_iter)\n\n raise wsp_pdu.DecodeError('Unrecognized token value: %s' % hex(token))", "def helper_decode(self, tokens: List[str]) -> str:\n chars: List[str] = []\n for token in tokens:\n decoded_token = self.decoder[token]\n token_chars = self.utf8_chars(decoded_token)\n for char in token_chars:\n if not 
torch.jit.is_scripting():\n # We iterate over \"char\", which is supposed to be a single\n # character, because the TorchScripted version of the code\n # correctly splits a string into single characters in\n # self.utf8_chars() but the non-TorchScripted version doesn't\n chars.extend(list(char))\n else:\n chars.append(char)\n decoded_chars: List[str] = []\n for char in chars:\n decoded_chars.append(chr(self.byte_decoder[char]))\n return ''.join(decoded_chars)", "def token(uncapped_token: Contract):\n return uncapped_token", "def decode_email(email):\n return", "def peek_app_token():\n if not os.path.exists(_token_storage_path):\n return None\n\n try:\n with open(_token_storage_path) as secret_file:\n return json.loads(secret_file.read())\n\n except Exception as exc:\n log.error(f'Could not read secret file.\\n{exc}')\n traceback.print_exc(file=sys.stderr)", "def test_decode(self):\n pass # TODO(tlarsen)", "def _upgrade_token(self, http_body):\n self.token_string = auth_sub_string_from_body(http_body)", "def decode(a):\n return decode(a)", "def decode_offset(token):\n token_dict = _decode(token)\n if token_dict is None:\n return None\n\n offset = token_dict.get('of')\n if not offset or not isinstance(offset, numbers.Number) or offset <= 0:\n raise ValueError('Invalid offset token JSON: {}'.format(token_dict))\n return offset", "def _decrypt(self, msg):\r\n # they must be real crypto experts at pubnub.com\r\n # two lines of code and two capital mistakes :-(\r\n # pylint: disable=E1101\r\n key = hashlib.sha256(self.cipher).hexdigest()[0:32]\r\n aes = AES.new(key, AES.MODE_CBC, \"0123456789012345\")\r\n decrypted = aes.decrypt(base64.decodestring(msg))\r\n return json.loads(decrypted[0:-ord(decrypted[-1])])", "def decode_response(response):\n return response.read().decode('utf-8')", "def decode(self): # pragma: no cover\n pass", "def decode(self, token):\n\n # <unk>, <pad> and other special tokens will be decoded into ''.\n text = self.tokenizer.decode(token, skip_special_tokens=True)\n\n # Handle replacement characters caused by multi-byte-pair-encoding or\n # Unicode surrogates or multi-code-point graphemes like emojis.\n if self.replacement in text:\n n = -self.surrogates if self.surrogates > 0 else len(self.buffer)\n tokens = self.buffer[n:] + [token]\n text = self.tokenizer.decode(tokens, skip_special_tokens=True)\n\n # Check whether the last grapheme was successfully decoded.\n if text and text[-1] != self.replacement:\n text = text.replace(self.replacement, \"\")\n self.surrogates = 0\n else:\n text = \"\"\n self.surrogates += 1\n else:\n self.surrogates = 0\n\n # Handle whitespace between tokens.\n tokens = self.buffer + [token]\n prefix = self.tokenizer.decode(self.buffer, skip_special_tokens=True)\n whole = self.tokenizer.decode(tokens, skip_special_tokens=True)\n if prefix + \" \" + text == whole:\n text = \" \" + text\n\n # Update buffer and offsets.\n self.buffer = self.buffer[-4:] + [token]\n self.start = self.end\n self.end += len(text)\n\n return text", "def _convert_token_to_id(self, token):\n return self.vocab.get(token, self.vocab.get(self.unk_token))", "def token_key(text):\n content2 = str(text.split())\n beginning = content2.find('access_token\":\"') + int(15)\n end = content2.find('token_type') - int(3)\n access_token = content2[beginning:end]\n return access_token", "def token_key(text):\n content2 = str(text.split())\n beginning = content2.find('access_token\":\"') + int(15)\n end = content2.find('token_type') - int(3)\n access_token = content2[beginning:end]\n return 
access_token", "def decode_mac_id(self, request, id):\n # There might be multiple secrets in use, if we're in the\n # process of transitioning from one to another. Try each\n # until we find one that works.\n secrets = self._get_token_secrets(request)\n for secret in secrets:\n try:\n data = tokenlib.parse_token(id, secret=secret)\n key = tokenlib.get_token_secret(id, secret=secret)\n break\n except ValueError:\n pass\n else:\n raise ValueError(\"invalid MAC id\")\n return key, data", "def _lookup_token(self):\n path = '/authn/{account}/{login}/authenticate'.format(\n account=self.account, login='admin'\n )\n res = self._post(path, data=self.api_token, skip_auth=True)\n return base64.b64encode(res.text)", "def try_get_user_id_from_token(token):\n dot_index = token.find('.')\n if (dot_index > 0):\n token_base64 = token[:dot_index]\n \n try:\n token_string = b64decode(token_base64)\n except Base64DecodeError:\n user_id = 0\n else:\n try:\n user_id = int(token_string)\n except ValueError:\n user_id = 0\n else:\n user_id = 0\n \n return user_id", "def doDecode(self):\n raise CipherError(\"override this funct and return the decoded msg\")", "def test_decode(self):\n assert url_encoder.decode('TheStakeOut') == 1\n assert url_encoder.decode('TheStockTip-TheSeven') == 800\n assert url_encoder.decode('MaleUnbonding-TheConversion-TheAndreaDoria') == 99999", "def verify_jwt(token):\n return jwt.decode(token.encode(), SECRET_KEY)", "def id_to_token(self, index):\r\n return self.decoder.get(index)", "def _resolve_secret_token(name, key, model_context):\n global _secret_token_map\n\n if _secret_token_map is None:\n _init_secret_token_map(model_context)\n\n secret_token = name + ':' + key\n return dictionary_utils.get_element(_secret_token_map, secret_token)", "def get_token(alias, reg_code, privKey):\n data = json.dumps({\n \"namespace\": alias,\n \"reg_code\": reg_code\n })\n url = endpoint('auth')\n r = requests.post(url,data=data) \n token_str = (r.__dict__['_content']).decode()\n r_token_obj = json.loads(token_str)\n token_cipher = ast.literal_eval( r_token_obj[\"token\"] )\n token_obj = dict()\n token_obj = {\n \"authToken\": decrypt_message( privKey, token_cipher),\n \"expiration_minutes\": r_token_obj[\"expiration_minutes\"],\n \"expiration\": str(datetime.datetime.now() + datetime.timedelta(minutes=r_token_obj[\"expiration_minutes\"]))\n }\n expiration = token_obj[\"expiration\"]\n expiration = parser.parse(expiration)\n if datetime.datetime.now() > expiration:\n print(\"Token has expired\")\n else:\n c = expiration - datetime.datetime.now()\n valid_minutes = str(divmod(c.total_seconds(), 60)[0])\n return token_obj[\"authToken\"]", "def deserializer():\n return bytes.decode" ]
[ "0.7735959", "0.76498765", "0.7568022", "0.74440914", "0.7167301", "0.7050499", "0.69980633", "0.69808865", "0.69499743", "0.6901275", "0.6866966", "0.6856232", "0.6854413", "0.68307525", "0.6819554", "0.68003577", "0.6735397", "0.66739464", "0.66599023", "0.665144", "0.66058666", "0.65918434", "0.65867525", "0.6559526", "0.6533071", "0.6478727", "0.64731866", "0.6469549", "0.64274627", "0.6423933", "0.64069843", "0.6342474", "0.6298641", "0.62620175", "0.6192522", "0.61581546", "0.6113766", "0.60970706", "0.6067179", "0.6067179", "0.60296506", "0.601475", "0.601475", "0.6003123", "0.5968189", "0.5938628", "0.5912072", "0.5900211", "0.58808196", "0.5863442", "0.5858495", "0.5856877", "0.5848512", "0.5821763", "0.5809411", "0.5806547", "0.579866", "0.5791016", "0.5774112", "0.57662547", "0.5740829", "0.57370454", "0.5736564", "0.57168496", "0.5714367", "0.571263", "0.5697972", "0.5674575", "0.56574357", "0.56553173", "0.56233954", "0.56172836", "0.5608038", "0.55916613", "0.55903316", "0.558921", "0.55831426", "0.55803466", "0.55584836", "0.5555139", "0.55420744", "0.55349034", "0.5533409", "0.5525994", "0.55059993", "0.5494729", "0.54922706", "0.54827833", "0.54749894", "0.54749894", "0.54705834", "0.5468558", "0.5465153", "0.5462442", "0.5458787", "0.5446138", "0.54376864", "0.541949", "0.5407765", "0.54005104" ]
0.7571242
2
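The negatives closing the record above center on pulling a Bearer token out of an Authorization header and decoding it as an HS256 JWT. As a point of reference, a minimal self-contained sketch of that pattern, assuming the PyJWT library and a placeholder SECRET_KEY (both illustrative, not taken from any record):

import jwt  # PyJWT

SECRET_KEY = "change-me"  # placeholder secret, for illustration only

def parse_bearer_token(authorization_header):
    # Expect the form "Bearer <token>"; anything else is malformed.
    parts = authorization_header.split()
    if len(parts) != 2 or parts[0].lower() != "bearer":
        raise ValueError("Authorization header has invalid format")
    # jwt.decode verifies the signature and raises
    # jwt.ExpiredSignatureError / jwt.InvalidTokenError on bad input.
    return jwt.decode(parts[1], SECRET_KEY, algorithms=["HS256"])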
Decode a token from a service account.
def decode_token_service_key(credentials, token, verify=True): return _decode_token(credentials, token, verify)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def decode_token(token):\n decoded_token = jwt.decode(token, secret_key, algorithms=['HS256'])\n return decoded_token", "def decode_token(token, options=JWT_OPTIONS):\n return jwt.decode(\n token,\n SECRET_KEY,\n issuer=JWT_ISSUER,\n audience=JWT_AUDIENCE,\n options=options,\n algorithms=(JWT_OPTIONS_ALGORITHM,)\n )", "def decode_token(token):\n\n return jwt.decode(\n token, settings.JWT_SECRET, algorithms=[settings.JWT_ALGO])", "def decode_token_appengine(credentials, token, verify=False):\n return _decode_token(credentials, token, False)", "def decode(token):\n return jwt.decode(token, app.config[\"JWT_SECRET\"], algorithms=[\"HS256\"])", "def decode(encoded_token):\n return jwt.decode(encoded_token, key=settings.JWT_AUTH['JWT_SECRET_KEY'])", "def decode_token(token):\n try:\n # Decode token with our secret key\n payload = jwt.decode(token, SECRET_KEY)\n return payload['sub']\n except jwt.ExpiredSignatureError:\n # token has expired\n return \"Timed out. Please login to get a new token\"\n except jwt.InvalidTokenError:\n return \"Invalid token. Please register or login\"", "def decode_auth_token(auth_token):\n if len(auth_token) != 139:\n return \"Invalid token. Please log in again.\"\n try:\n payload = jwt.decode(auth_token, key)\n is_blacklisted_token = BlacklistToken.check_blacklist(auth_token)\n if is_blacklisted_token:\n return 'Token blacklisted. Please log in again.'\n else:\n return payload['sub']\n except jwt.ExpiredSignatureError:\n return 'Signature expired. Please log in again.'\n except jwt.InvalidTokenError:\n return 'Invalid token. Please log in again.'", "def decode_token(token):\n try:\n payload = jwt.decode(\n token, app.config.get('SECRET_KEY'), algorithms='HS256')\n return payload['sub']\n except jwt.ExpiredSignatureError:\n return \"Expired token. Please login to get a new token\"\n except jwt.InvalidTokenError:\n return \"Invalid token. Please register or login\"", "def decode_auth_token(auth_token, config):\n secret_key = config['JWT_SECRET_KEY']\n try:\n payload = jwt.decode(auth_token, secret_key)\n return payload['sub']\n except jwt.ExpiredSignatureError as error:\n raise ExpiredToken from error\n except (jwt.InvalidTokenError, jwt.DecodeError) as error:\n raise InvalidToken from error", "def decode_token(token):\n try:\n # try to decode the token using our SECRET variable\n payload = jwt.decode(token, app.config.get('SECRET_KEY'), algorithms=['HS256'])\n return payload['sub']\n except jwt.ExpiredSignatureError:\n # the token is expired, return an error string\n return \"Expired token. Please login to get a new token\"\n except jwt.InvalidTokenError:\n # the token is invalid, return an error string\n return \"Invalid token. Please register or login\"", "def decode_token(token):\n payload = None\n try:\n payload = jwt.decode(token.encode('utf-8'), '1$Arh\"1bWa/7+OS', algorithm='HS256')['u_id']\n except jwt.InvalidTokenError:\n pass\n return payload", "def decode_auth_token(auth_token):\n try:\n payload = jwt.decode(auth_token, app.config.get('SECRET_KEY'))\n\n # is_blacklisted_token = BlacklistToken.check_blacklist(auth_token)\n # if is_blacklisted_token:\n # return 'Token blacklisted. Please log in again.'\n # else:\n return payload['sub']\n except jwt.ExpiredSignatureError:\n return 'Signature expired. Please log in again.'\n except jwt.InvalidTokenError:\n return 'Invalid token. 
Please log in again.'", "def decode_auth_token(auth_token):\n try:\n payload = jwt.decode(auth_token, Config.SECRET_KEY,algorithms='HS256')\n return payload\n except jwt.ExpiredSignatureError:\n return 'Signature expired. Please log in again.'\n except jwt.InvalidTokenError:\n return 'Invalid token. Please log in again.'", "def decode_auth_token(auth_token):\n try:\n payload = jwt.decode(auth_token, key)\n return payload['sub']\n except jwt.ExpiredSignatureError:\n return 'Signature expired. Please log in again.'\n except jwt.InvalidTokenError:\n return 'Invalid token. Please log in again.'", "def decode_auth_token(secret_key, auth_token):\n try:\n payload = jwt.decode(auth_token, secret_key) \n is_blacklisted_token = BlacklistToken.check_blacklist(auth_token)\n if is_blacklisted_token:\n return 'Token blacklisted. Please log in again.' \n else: \n return payload['sub']\n except jwt.ExpiredSignatureError:\n return 'Signature expired. Please log in again.'\n except jwt.InvalidTokenError:\n return 'Invalid token. Please log in again.'", "def decodeJWT(self, token):\n try:\n return jwt.decode(token, self.secret, algorithms=[self.algorithm])\n except jwt.exceptions.InvalidSignatureError:\n raise ValueError(f'The following JWT is invalid: {token}')", "def parse_token(token):\n return jwt.decode(token, app.config['JWT_SECRET'])", "def decode_auth_token(auth_token):\n try:\n payload = jwt.decode(auth_token, Config.SECRET_KEY,algorithms='HS256')\n return payload['role']\n except jwt.ExpiredSignatureError:\n return 'Signature expired. Please log in again.'\n except jwt.InvalidTokenError:\n return 'Invalid token. Please log in again.'", "def decode_token(token):\n try:\n # try to decode the token using our SECRET variable\n payload = jwt.decode(token, os.environ.get('SECRET', 'test'))\n return \"\", payload['sub']\n except jwt.ExpiredSignatureError:\n # the token is expired, return an error string\n return \"Expired token. Please login to get a new token\", None\n except jwt.InvalidTokenError:\n # the token is invalid, return an error string\n return \"Invalid token. Please register or login\", None", "def decodeAuthToken(authToken):\n try:\n return jwt.decode(authToken, current_app.config['SECRET_KEY'], algorithm='HS256')['sub']\n except jwt.ExpiredSignatureError:\n return 'signature expired, Please login again'\n except jwt.InvalidTokenError:\n return 'Invalid token'", "def decode_auth_token(auth_token: str) -> Union[str, int]:\n try:\n payload = jwt.decode(auth_token, key, algorithms='HS256')\n \n user=Usuario.query.filter_by(id=payload['uid']).first()\n is_blacklisted_token = BlacklistToken.check_blacklist(auth_token)\n if is_blacklisted_token:\n return 'Conta deslogada. Por favor realizar o login novamente.'\n elif user.ativo!=True:\n return 'Conta invativa. Por favor entrar em contato com o administrador.'\n else:\n return payload['uid']\n except jwt.ExpiredSignatureError:\n return 'Token expirado. Por favor realizar o login novamente.'\n except jwt.InvalidTokenError:\n return 'Token inválido. Por favor realizar o login novamente.'", "def decode_auth_token(auth_token): \n try: \n payload = jwt.decode(auth_token, getattr(settings, \"SECRET_KEY\", \"\"),algorithms=['HS256']) \n is_blacklisted_token = User.check_blacklist(auth_token)\n if is_blacklisted_token:\n return False,'Token blacklisted. Please log in again.'\n else:\n return True, payload['sub']\n except jwt.ExpiredSignatureError:\n return False,'Signature expired. 
Please log in again.'\n except jwt.InvalidTokenError:\n return False,'Invalid token. Please log in again.'", "def decode(\n self,\n keys: Union[KeyInterface, List[KeyInterface]],\n token: Union[bytes, str],\n implicit_assertion: Union[bytes, str] = b\"\",\n deserializer: Optional[Any] = None,\n aud: str = \"\",\n ) -> Token:\n\n if deserializer:\n try:\n if not callable(deserializer.loads):\n raise ValueError(\"deserializer should have loads().\")\n except AttributeError:\n raise ValueError(\"deserializer should have loads().\")\n except Exception:\n raise\n\n keys = keys if isinstance(keys, list) else [keys]\n bi = implicit_assertion if isinstance(implicit_assertion, bytes) else implicit_assertion.encode(\"utf-8\")\n\n failed = None\n t = Token.new(token)\n for k in keys:\n if k.header != t.header:\n continue\n try:\n if k.purpose == \"local\":\n t.payload = k.decrypt(t.payload, t.footer, bi)\n else:\n t.payload = k.verify(t.payload, t.footer, bi)\n try:\n if deserializer:\n t.payload = deserializer.loads(t.payload)\n except Exception as err:\n raise ValueError(\"Failed to deserialize the payload.\") from err\n if deserializer:\n try:\n if t.footer:\n t.footer = deserializer.loads(t.footer)\n except Exception:\n pass\n self._verify_registered_claims(t.payload, aud)\n return t\n except Exception as err:\n failed = err\n if failed:\n raise failed\n raise ValueError(\"key is not found for verifying the token.\")", "def get_token(self):\n\n try:\n return jwt.decode(self.fetch_token(), KEY, algorithms=['HS256'])\n except jwt.exceptions.DecodeError:\n raise InvalidToken", "def decode(self, token, verify=True):\n try:\n return jwt.decode(\n token,\n self.get_verifying_key(token),\n algorithms=[self.algorithm],\n audience=self.audience,\n issuer=self.issuer,\n leeway=self.leeway,\n options={\n 'verify_aud': self.audience is not None,\n 'verify_signature': verify,\n },\n )\n except InvalidAlgorithmError as ex:\n raise TokenBackendError(_('Invalid algorithm specified')) from ex\n except InvalidTokenError:\n raise TokenBackendError(_('Token is invalid or expired'))", "def decode_token(self, token: str, max_age: int) -> Optional[object]:\n try:\n return self.serializer.loads(token, max_age)\n except (BadSignature, SignatureExpired) as e:\n return None", "def _decode(token):\n if token is None:\n return None\n # Pad the token out to be divisible by 4.\n padded_token = bytes(token, 'utf8') + '='.encode() * (4 - (len(token) % 4))\n decoded_token = base64.urlsafe_b64decode(padded_token)\n token_dict = json.loads(decoded_token)\n if not token_dict or not isinstance(token_dict, dict):\n raise ValueError('Invalid pagination token: {}').format(token_dict)\n return token_dict", "def decode_token(token: str):\n try:\n decoded = b64decode(token.encode())\n key_data = orjson.loads(decoded)\n timestamp = int(key_data['t'])\n pub_key = key_data['p']\n signature = key_data['s']\n except (ValueError, TypeError, KeyError, orjson.JSONDecodeError, binascii.Error) as e:\n logging.debug(\"Invalid token format: %s\", token)\n raise HTTPException(status_code=403, detail=\"Invalid token\") from e\n\n if timestamp > time.time() or timestamp < time.time() - TOKEN_EXPIRE_INTERVAL:\n raise HTTPException(status_code=403, detail=\"Token expired\")\n\n try:\n check_signature(\n ''.join([pub_key, str(timestamp)]),\n signature,\n pub_key\n )\n except InvalidSignature as e:\n logging.error(\"Invalid token signature. 
Might be access violation.\")\n raise HTTPException(status_code=403, detail=\"Invalid token\") from e\n\n return pub_key", "async def validate_token(self, token: bytes, audience=None) -> Dict[str, str]:\n\n try:\n header = jwt.get_unverified_header(token)\n if \"kid\" not in header:\n raise InvalidToken(\"Missing kid in header\")\n return jwt.decode(token, await self.retrieve_public_key(self._decode_public_key_identifier(header[\"kid\"])), algorithms='RS256', issuer=tedious.config.CONFIG[\"TOKEN\"][\"issuer\"], audience=audience)\n except DecodeError:\n raise InvalidToken(\"Unable to decode token.\")\n except Exception as e:\n raise InvalidToken(str(type(e)) + \" \" + str(e))", "async def decode(self, jwt_token: str, verify=True) -> dict:\n try:\n jwt_token = jwt_token.replace(f\"{self.auth_schema} \", \"\")\n payload = jwt.decode(\n jwt_token,\n self.jwt_secret,\n algorithms=(self.jwt_algorithm,),\n options={\"verify_exp\": verify},\n )\n\n return payload\n\n except jwt.DecodeError:\n raise InvalidTokenException()\n\n except jwt.ExpiredSignatureError:\n raise TokenExpiredException()", "def get_payload(cls, token):\n \n secret = cls.secret\n algo = cls.algo\n decoded = jwt.decode(token, secret, algo)\n return decoded", "def deserialize_cred(context_obj, encoded_cred):\n\treturn serialize_or_deserialize_cred(context_obj,encoded_cred,DESERIALIZE)", "def deserialize_tokens():\n\ttry:\n\t\twith open(config.TOKENPATH, \"r+\") as f:\n\t\t\tcontext = f.read()\n\t\t\tres = eval(context)\n\t\t\t# load into memory\n\t\t\treturn res[\"access_token\"], res[\"refresh_token\"]\n\texcept:\n\t\t# unexcept token format\n\t\tfrom common import ApplicationException\n\t\traise ApplicationException(\"authorization file is broken, please run init\")", "def load_token(token):\n \n #The Token itself was generated by User.get_auth_token. So it is up to \n #us to known the format of the token data itself. \n \n #The Token was encrypted using itsdangerous.URLSafeTimedSerializer which \n #allows us to have a max_age on the token itself. When the cookie is stored\n #on the users computer it also has a exipry date, but could be changed by\n #the user, so this feature allows us to enforce the exipry date of the token\n #server side and not rely on the users cookie to exipre. 
\n max_age = REMEMBER_COOKIE_DURATION.total_seconds()\n \n #Decrypt the Security Token, data = [username, hashpass]\n data = login_serializer.loads(token, max_age=max_age)\n \n #Find the User\n user = load_user(data[0])\n \n #Check Password and return user or None\n if user and data[1] == user.password:\n return user\n return None", "def test_decode_token(token):\n payload = User.decode_auth_token(token)\n user = User.find_by_id(payload.get('id'))\n assert isinstance(user, User) is True\n assert user.email == '[email protected]'", "def decode(encoded):\n if encoded is None:\n return None\n\n try:\n s = decode(APP.config['SECRET_KEY'], encoded)\n return json.loads(s)\n except Exception as err:\n LOGGER.error('Error decoding auth: %s' % str(err))\n raise err", "async def get_user(token: str = Depends(get_user_token_strict)) -> schemas.UserToken:\n token_info = await security.decode_jwt(token)\n return schemas.UserToken.from_token(token_info)", "def parseAuthResponse(self, code):\n oaDict = {}\n\n # Get tokens\n result = self.fetchToken(code)\n if not result['OK']:\n return result\n self.log.debug('Token RESPONSE:\\n', pprint.pformat(result['Value']))\n oaDict['Tokens'] = result['Value']\n\n # Get user profile\n result = self.getUserProfile(oaDict['Tokens']['access_token'])\n if not result['OK']:\n return result\n oaDict['UserProfile'] = result['Value']\n self.log.debug('User profile RESPONSE:\\n', pprint.pformat(result['Value']))\n\n # Get tokens\n result = self.fetchToken(refreshToken=oaDict['Tokens']['refresh_token'])\n if not result['OK']:\n return result\n oaDict['Tokens'] = result['Value']\n self.log.debug('Token RESPONSE:\\n', pprint.pformat(result['Value']))\n\n return S_OK(oaDict)", "def test_decode_token_invalid(token):\n payload = User.decode_auth_token(f'{token}1337')\n assert isinstance(payload, User) is False\n assert 'Invalid token' in payload", "def verify_token(self, token: str) -> str:\n return decode(self.rd.hget(\"auth:by_token\", token))", "def decode_jwt(encoded, key, algorithms = 'HS256'):\n try:\n payload = jwt.decode(\n encoded, \n key, \n algorithms = algorithms\n )\n\n return payload\n # if token has expired:\n except jwt.exceptions.ExpiredSignatureError:\n raise JWTError(\n {\n 'code': 'token_expired',\n 'description': 'Token expired.'\n }, \n 401\n )\n # other exceptions:\n except Exception:\n raise JWTError(\n {\n 'code': 'invalid_header',\n 'description': 'Unable to parse authentication token.'\n }, \n 400\n )", "def _lookup_token(self):\n path = '/authn/{account}/{login}/authenticate'.format(\n account=self.account, login='admin'\n )\n res = self._post(path, data=self.api_token, skip_auth=True)\n return base64.b64encode(res.text)", "def decode_jwt(self, token):\n key = self.master_secret\n public_key = self.public_key\n if self.public_key_file is not None:\n with open(self.public_key_file, 'r') as rsa_pub_file:\n public_key = rsa_pub_file.read()\n if public_key is not None:\n key = public_key\n if self.leeway is not None:\n leeway = self.leeway\n else:\n leeway = 0\n options = {\n 'verify_exp': self.verify_expiration,\n }\n try:\n claims_set = jwt.decode(\n token,\n key,\n options=options,\n leeway=leeway,\n issuer=self.issuer\n )\n except (jwt.DecodeError, jwt.ExpiredSignature):\n return None\n return claims_set", "def parse_token_result(self, res: dict, what: str) -> None:\n if 'error' in res:\n message: str = '{}: {}'.format(what, res['error'].get('message'))\n code: int = int(res['error'].get('code'))\n\n if code == 401:\n raise TokenExpiredError(message, 
code)\n else:\n raise AuthenticationTokenError(message, code)\n\n self.token = res.get('_TOKEN')\n\n expires_at = res.get('expires-at')\n if expires_at:\n self.expires_at = int(expires_at)\n else:\n expires_in = res.get('expires_in')\n if expires_in:\n self.expires_at = self.get_epoch_millis() + int(expires_in) * 1000\n\n refresh_token = res.get('refresh_token')\n if refresh_token:\n self.refresh_token = refresh_token\n\n self.last_update = self.get_epoch_millis()", "def get_token(alias, reg_code, privKey):\n data = json.dumps({\n \"namespace\": alias,\n \"reg_code\": reg_code\n })\n url = endpoint('auth')\n r = requests.post(url,data=data) \n token_str = (r.__dict__['_content']).decode()\n r_token_obj = json.loads(token_str)\n token_cipher = ast.literal_eval( r_token_obj[\"token\"] )\n token_obj = dict()\n token_obj = {\n \"authToken\": decrypt_message( privKey, token_cipher),\n \"expiration_minutes\": r_token_obj[\"expiration_minutes\"],\n \"expiration\": str(datetime.datetime.now() + datetime.timedelta(minutes=r_token_obj[\"expiration_minutes\"]))\n }\n expiration = token_obj[\"expiration\"]\n expiration = parser.parse(expiration)\n if datetime.datetime.now() > expiration:\n print(\"Token has expired\")\n else:\n c = expiration - datetime.datetime.now()\n valid_minutes = str(divmod(c.total_seconds(), 60)[0])\n return token_obj[\"authToken\"]", "def _get_creds_from_token(token):\n cred_string = base64.b64decode(token).decode(\"ascii\")\n username, password = str(cred_string).split(\":\")\n return username, password", "def decode(self, crypto):", "def test_decode_token():\n pass", "def __get_token(self):\n r = requests.post(self.credentials.conf('endpoint') + '/tokens', json={\n 'auth': {\n 'passwordCredentials': {\n 'username': self.credentials.conf('username'),\n 'password': self.credentials.conf('password'),\n },\n 'tenantId': self.credentials.conf('tenant_id'),\n },\n })\n logger.debug('request:')\n logger.debug('%s', r.request.body)\n #print(r.status_code)\n if r.status_code != 200:\n logger.debug('%s', r.content)\n logger.debug('%s', r.json())\n raise RuntimeError('It failed to get token.')\n logger.debug('%s', r.content)\n j = r.json()\n logger.debug('%s', j)\n token = j['access']['token']['id']\n \n # Get DNS URL.\n \n dns_vers_url = None\n for svc in j['access']['serviceCatalog']:\n if svc['type'] == 'dns':\n for ep in svc['endpoints']:\n if ep['region'] == self.credentials.conf('region'):\n dns_vers_url = ep['publicURL']\n if not dns_vers_url:\n raise RuntimeError('It failed to get DNSv1 URL.')\n \n # Get DNSv1 URL.\n r = requests.get(dns_vers_url, headers={'Accept': 'application/json'})\n #print(r.status_code)\n if r.status_code != 300:\n logger.debug('%s', r.content)\n logger.debug('%s', r.json())\n raise RuntimeError('It failed to get DNS URLs.')\n logger.debug('%s', r.content)\n j = r.json()\n logger.debug('%s', j)\n \n url = None\n for val in j['versions']['values']:\n if val['id'] == 'v1':\n url = val['links'][0]['href']\n if not url:\n raise RuntimeError('No DNS v1 URL.')\n return (token, url)", "def get_token(self):\n auth_data = {\"auth\": {\"tenantName\": 'service',\n \"passwordCredentials\":{ \"username\": 'vsm',\n \"password\": self._password}}}\n\n auth_request = urllib2.Request(self._auth_url)\n auth_request.add_header(\"content-type\", \"application/json\")\n auth_request.add_header('Accept', 'application/json')\n auth_request.add_header('User-Agent', 'python-mikeyp')\n auth_request.add_data(json.dumps(auth_data))\n auth_response = urllib2.urlopen(auth_request)\n 
response_data = json.loads(auth_response.read())\n\n self._token = response_data['access']['token']['id']\n\n service_list = response_data['access']['serviceCatalog']\n for s in service_list:\n if s['type'] == 'vsm' and s['name'] == 'vsm':\n self._vsm_url = s['endpoints'][0]['publicURL']\n break\n\n url_id = self._vsm_url.split('/')[-1]\n return self._token + \"-\" + url_id", "def get_token(self, token_id):\n raise exception.NotImplemented() # pragma: no cover", "def get_access_token(self, decode: bool = False) -> Union[Dict, str]:\n token = os.environ.get('NEXTCODE_ACCESS_TOKEN') or get_access_token(self.profile.api_key)\n if decode:\n return decode_token(token)\n else:\n return token", "def decrypt(self, encrypted_token: bytes) -> bytes:\n return None", "def validate_token(token):\n # first, decode the token data to determine the tenant associated with the token. We are not able to\n # check the signature until we know which tenant, and thus, which public key, to use for validation.\n try:\n data = jwt.decode(token, verify=False)\n except Exception as e:\n logger.debug(f\"got exception trying to parse data from the access_token jwt; exception: {e}\")\n raise errors.AuthenticationError(\"could not parse the access token.\")\n # get the tenant out of the jwt payload and get associated public key\n token_tenant_id = data['tenant_id']\n try:\n public_key_str = get_tenant_config(token_tenant_id)['public_key']\n except errors.BaseTapisError:\n raise errors.AuthenticationError(\"Unable to process Tapis token; unexpected tenant_id.\")\n except KeyError:\n raise errors.AuthenticationError(\"Unable to process Tapis token; no public key associated with the \"\n \"tenant_id.\")\n # try:\n # pub_key = get_pub_rsa_key(public_key_str)\n # except Exception as e:\n # logger.error(f\"got exception trying to create public RSA key object; e: {e} \")\n # raise errors.ServiceConfigError(\"Unable to process public key associated with tenant.\")\n try:\n return jwt.decode(token, public_key_str, algorithm='RS256')\n except Exception as e:\n logger.debug(f\"Got exception trying to decode token; exception: {e}\")\n raise errors.AuthenticationError(\"Invalid Tapis token.\")", "def test_decode_IQ_token(self):\n\n token = 
\"\"\"eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJuYW1laWQiOiJhZGZzfHNodzAwMXNhaW50ZWxzZXdoZXJlfGpwX2FkbWluQHVybjphdXRoMDpzYWludGVsc2V3aGVyZSIsImVtYWlsIjoiSmFpbWluLlBhdGVsODMrNTE2NDU2QGdtYWlsLmNvbSIsInVuaXF1ZV9uYW1lIjoiSVFHRU5IT1NQXFxiXy1kcHl4eDBFeVVjR0pIaG1aOCIsImh0dHBzOi8vdGVsZXRyYWNraW5nLmNsb3VkYXBwLm5ldC9pZGVudGl0eS9jbGFpbXMvYWR1c2VyZ3VpZCI6IjMveFFhZ0VrSWttcllBU0VQZHVZRmc9PSIsImh0dHBzOi8vdGVsZXRyYWNraW5nLmNsb3VkYXBwLm5ldC9pZGVudGl0eS9jbGFpbXMvZmlyc3RuYW1lIjoiQWRtaW4iLCJodHRwczovL3RlbGV0cmFja2luZy5jbG91ZGFwcC5uZXQvaWRlbnRpdHkvY2xhaW1zL2xhc3RuYW1lIjoiVGVzdCIsImh0dHBzOi8vdGVsZXRyYWNraW5nLmNsb3VkYXBwLm5ldC9pZGVudGl0eS9jbGFpbXMvb3VuYW1lIjoiU2FpbnRFbHNld2hlcmUiLCJyb2xlIjpbIkRvbWFpbiBVc2VycyIsIkFkbWluaXN0cmF0b3IiLCJJUUdlbkhvc3BTZWMiLCJTYWludEVsc2V3aGVyZSJdLCJ1cG4iOiJKYWltaW4uUGF0ZWw4Mys1MTY0NTZAZ21haWwuY29tIiwiaHR0cDovL3NjaGVtYXMuYXV0aDAuY29tL2lkZW50aXRpZXMvZGVmYXVsdC9wcm92aWRlciI6ImFkZnMiLCJodHRwOi8vc2NoZW1hcy5hdXRoMC5jb20vaWRlbnRpdGllcy9kZWZhdWx0L2Nvbm5lY3Rpb24iOiJzaHcwMDFzYWludGVsc2V3aGVyZSIsImh0dHA6Ly9zY2hlbWFzLmF1dGgwLmNvbS9pZGVudGl0aWVzL2RlZmF1bHQvaXNTb2NpYWwiOiJmYWxzZSIsImh0dHA6Ly9zY2hlbWFzLmF1dGgwLmNvbS9naXZlbl9uYW1lIjoiSVFHRU5IT1NQXFxiXy1kcHl4eDBFeVVjR0pIaG1aOCIsImh0dHA6Ly9zY2hlbWFzLmF1dGgwLmNvbS9waWN0dXJlIjoiaHR0cHM6Ly9zLmdyYXZhdGFyLmNvbS9hdmF0YXIvMzUxYTRiMjU4NWViM2UyYjA1NWI4ZTAyOGY4NzdmNDc_cz00ODBcdTAwMjZyPXBnXHUwMDI2ZD1odHRwcyUzQSUyRiUyRmNkbi5hdXRoMC5jb20lMkZhdmF0YXJzJTJGaXEucG5nIiwiaHR0cDovL3NjaGVtYXMuYXV0aDAuY29tL25pY2tuYW1lIjoiSmFpbWluLlBhdGVsODMrNTE2NDU2IiwiaHR0cDovL3NjaGVtYXMuYXV0aDAuY29tL2VtYWlsX3ZlcmlmaWVkIjoidHJ1ZSIsImh0dHA6Ly9zY2hlbWFzLmF1dGgwLmNvbS9jbGllbnRJRCI6Imtrakgxd3AzdE53RmpEN0M1djI3a0oyWHFWUHE1akhtIiwiaHR0cDovL3NjaGVtYXMuYXV0aDAuY29tL3VwZGF0ZWRfYXQiOiJNb24gSmFuIDE0IDIwMTkgMTU6NTY6MTIgR01UKzAwMDAgKFVUQykiLCJodHRwOi8vc2NoZW1hcy5hdXRoMC5jb20vY3JlYXRlZF9hdCI6IkZyaSBKYW4gMTEgMjAxOSAyMDoxNToyMiBHTVQrMDAwMCAoVVRDKSIsImF1dGhtZXRob2QiOiJodHRwOi8vc2NoZW1hcy5taWNyb3NvZnQuY29tL3dzLzIwMDgvMDYvaWRlbnRpdHkvYXV0aGVudGljYXRpb25tZXRob2QvcGFzc3dvcmQiLCJhdXRoX3RpbWUiOiIyMDE5LTAxLTE0VDIzOjU2OjEyLjg1M1oiLCJodHRwczovL3RlbGV0cmFja2luZy5jbG91ZGFwcC5uZXQvaWRlbnRpdHkvY2xhaW1zL3RlbmFudGlkIjoiMjExNmU5NDMtNTA5NC00MWY3LTgzMTgtODNhYWMyYWMxMTQ3IiwiaHR0cHM6Ly90ZWxldHJhY2tpbmcuY2xvdWRhcHAubmV0L2lkZW50aXR5L2NsYWltcy9jb250ZXh0cGVyc29uaWQiOiIwYTAxMjBhMS04NTU3LTQ4MzEtYTQyNi1hOGJkMDBmNjFkYzkiLCJodHRwczovL3RlbGV0cmFja2luZy5jbG91ZGFwcC5uZXQvaWRlbnRpdHkvY2xhaW1zL3VzZXJuYW1lZm9ybWFsIjoiVGVzdCwgQWRtaW4iLCJodHRwczovL3RlbGV0cmFja2luZy5jbG91ZGFwcC5uZXQvaWRlbnRpdHkvY2xhaW1zL3VzZXJuYW1laW5mb3JtYWwiOiJBZG1pbiBUZXN0IiwiaHR0cHM6Ly90ZWxldHJhY2tpbmcuY2xvdWRhcHAubmV0L2lkZW50aXR5L2NsYWltcy91c2VySWQiOiI0ZmU5OTdmZC00ZGNkLTQxNWItYjJjYi1hOGJkMDBmNjFkYzkiLCJodHRwczovL3RlbGV0cmFja2luZy5jbG91ZGFwcC5uZXQvaWRlbnRpdHkvY2xhaW1zL2ZlYXR1cmV0eXBlaWQiOlsiNCIsIjIiLCIxIiwiMyIsIjUiLCI2Il0sImlzcyI6InRlbGV0cmFja2luZy5jb20iLCJhdWQiOiJodHRwOi8vd3d3LnNlcnZpY2UudGVsZXRyYWNraW5nLmNvbS8iLCJleHAiOjE1NTAwNzM0MzksIm5iZiI6MTU0NzQ4MTQzOX0.UCL-Wc3OSVDI58U5ShOYqLa-DwNc_WQ3BlY5P3CfnVI\"\"\"\n audience = 'http://www.service.teletracking.com/'\n\n secret = 'drMemxWrLen6fCXQA5jO6gXkK/UoZVzPGRDiff7ByPU='\n decoded_token = AuthenticationHandler.validate_and_decode_token(\n token=token, key=secret,\n audience=audience\n )\n self.assertTrue(decoded_token['role'][0] == 'Domain Users', \"Group 1 not match\")\n self.assertTrue(decoded_token['role'][1] == 'Administrator', \"Group 2 not match\")", "def _verified_token(self,encoded_token: bytes) -> Dict[str,Union[str,int,bool]]:\n try:\n return jwt.decode(encoded_token,self._SECRET_KEY,algorithms=self._ALGORITHM)\n 
except jwt.ExpiredSignatureError as err:\n raise HTTPException(status_code=422,detail=str(err))\n except jwt.DecodeError as err:\n raise HTTPException(status_code=422,detail=str(err))\n except jwt.InvalidAlgorithmError as err:\n raise HTTPException(status_code=422,detail=str(err))\n except jwt.InvalidKeyError as err:\n raise HTTPException(status_code=422,detail=str(err))\n except jwt.InvalidTokenError as err:\n raise HTTPException(status_code=422,detail=str(err))\n except jwt.InvalidIssuerError as err:\n raise HTTPException(status_code=422,detail=str(err))\n except jwt.InvalidAudienceError as err:\n raise HTTPException(status_code=422,detail=str(err))\n except jwt.InvalidIssuedAtError as err:\n raise HTTPException(status_code=422,detail=str(err))\n except jwt.InvalidSignatureError as err:\n raise HTTPException(status_code=422,detail=str(err))\n except jwt.ImmatureSignatureError as err:\n raise HTTPException(status_code=422,detail=str(err))\n except jwt.MissingRequiredClaimError as err:\n raise HTTPException(status_code=422,detail=str(err))", "def get_token_from_json(json):\r\n return PodiumToken(json[\"access_token\"], json[\"token_type\"], json[\"created_at\"])", "def token(uncapped_token: Contract):\n return uncapped_token", "def decode_request(self, data):\n return decode_jwt(data[\"jwt\"], data[\"cert_name\"], self.node.node_name, self.node.id)", "def parse_id_token(self, token, nonce, claims_options=None, leeway=120):\n if 'id_token' not in token:\n return None\n\n def load_key(header, _):\n jwk_set = JsonWebKey.import_key_set(self.fetch_jwk_set())\n try:\n return jwk_set.find_by_kid(header.get('kid'))\n except ValueError:\n # re-try with new jwk set\n jwk_set = JsonWebKey.import_key_set(self.fetch_jwk_set(force=True))\n return jwk_set.find_by_kid(header.get('kid'))\n\n claims_params = dict(\n nonce=nonce,\n client_id=self.client_id,\n )\n if 'access_token' in token:\n claims_params['access_token'] = token['access_token']\n claims_cls = CodeIDToken\n else:\n claims_cls = ImplicitIDToken\n\n metadata = self.load_server_metadata()\n if claims_options is None and 'issuer' in metadata:\n claims_options = {'iss': {'values': [metadata['issuer']]}}\n\n alg_values = metadata.get('id_token_signing_alg_values_supported')\n if alg_values:\n _jwt = JsonWebToken(alg_values)\n else:\n _jwt = jwt\n\n claims = _jwt.decode(\n token['id_token'], key=load_key,\n claims_cls=claims_cls,\n claims_options=claims_options,\n claims_params=claims_params,\n )\n # https://github.com/lepture/authlib/issues/259\n if claims.get('nonce_supported') is False:\n claims.params['nonce'] = None\n\n claims.validate(leeway=leeway)\n return UserInfo(claims)", "def get_token(self):\n message = {\n \"request\": \"access_token\",\n \"account\": self.account,\n \"min_valid_period\": self.validity,\n \"application_hint\": \"orpy\",\n }\n try:\n self._sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)\n self._sock.connect(self.socket_path)\n self._sock.sendall(json.dumps(message).encode())\n\n data = \"\"\n while True:\n recv = self._sock.recv(16).decode()\n if recv:\n data += recv\n else:\n break\n except socket.error as err:\n raise exceptions.AuthExceptiob(\n err=\"Cannot communicate with the \" \"oidc-agent: %s\" % err\n )\n finally:\n self._sock.close()\n\n token = json.loads(data)\n if token.get(\"status\") == \"failure\":\n raise exceptions.AuthError(err=token.get(\"error\"))\n return token", "def load_token(token):\n\n #The Token itself was generated by User.get_auth_token. 
So it is up to \n #us to known the format of the token data itself. \n\n #The Token was encrypted using itsdangerous.URLSafeTimedSerializer which \n #allows us to have a max_age on the token itself. When the cookie is stored\n #on the users computer it also has a exipry date, but could be changed by\n #the user, so this feature allows us to enforce the exipry date of the token\n #server side and not rely on the users cookie to exipre. \n #max_age = app.config[\"REMEMBER_COOKIE_DURATION\"].total_seconds()\n\n #Decrypt the Security Token, data = [username, hashpass]\n data = login_serializer.loads(token)\n\n #Find the User\n user = User.query.filter_by(email = data[0]).first()\n\n #Check Password and return user or None\n if user and data[1] == user.pwd:\n \t#On update la derniere connection du user\n \tuser.update_last_connection()\n return user\n return None\n\n\n\n\n\n\n\n #######################################################\n ################# REQUETES ############################\n #######################################################", "def get(uid: int, token_id: int):\n\n token = Token.get(uid, token_id).as_dto().to_primitive()\n\n if token:\n return token.to_primitive()\n else:\n raise NotFound(\"Token Not Found\")", "def decode_payload(encoded_payload):\n jwt_secret = app.config['SECRET_KEY']\n payload = jwt.decode(encoded_payload, jwt_secret, algorithms='HS256')\n\n return payload", "def decode_secret(secret, encoding=SecretEncoding.BASE32):\n return _decoding_map[encoding](secret)", "def decrypt_message(self):\n token = bytes(self.args['token'].encode('utf-8'))\n message = base64.urlsafe_b64decode(token)\n\n # Check that the message is valid (HMAC-SHA1 checking).\n if not self.check_hmac_signature(message):\n raise TokenAuthenticationError('HMAC authentication failed')\n\n init_vector = message[:16]\n enc_message = message[16:-20]\n\n aes = AES.new(bytes(self.settings['aes_key'].encode('utf-8')), AES.MODE_CBC, init_vector)\n message = aes.decrypt(enc_message).decode('utf-8')\n\n # Get the login data in an easy-to-use tuple.\n try:\n login_data = self.get_login_data(message)\n except AttributeError:\n # Regex failed, so data was not valid.\n raise TokenAuthenticationError('Message does not contain valid login data')\n\n name = login_data[2].strip()\n first_name = name.split(' ').pop(0)\n parts = name.split(' ')\n parts.pop(0)\n last_name = \" \".join(parts)\n email = login_data[3].strip()\n email = ''.join(x for x in email if x in string.printable)\n\n data = {\n 'timestamp': login_data[0],\n 'remote_id': email,\n 'email': email,\n 'first_name': first_name,\n 'last_name': last_name,\n 'username': email\n }\n\n return data", "def decodeAccesshTokenForRefreshToken( accessToken):\n try:\n payload = jwt.decode(accessToken, ApiJWTAuthentication.secretKey_access)\n return {\"message\": \"success\",\"refresh_token\": payload['refresh_token']}\n except jwt.ExpiredSignatureError:\n return {\"message\": \"Expired Access Token\"}\n except jwt.InvalidTokenError:\n return {\"message\": \"Invalid access Token\"}", "def decode_link_ticket(encoded):\n return replication_pb2.ServiceLinkTicket.FromString(b64.decode(encoded))", "def get_token(client, email_or_username, password):\n\turl = 'account/token'\n\tbasic_auth = (email_or_username, password)\n\treturn client._request(url, Request.GET, basic_auth=basic_auth)", "def _parse_token(self, response=None):\n token_url = 'https://tinychat.com/start?#signin'\n if response is None:\n response = util.web.http_get(url=token_url, referer=token_url, 
proxy=self._proxy)\n\n if response is not None and response['content'] is not None:\n soup = BeautifulSoup(response['content'], 'html.parser')\n\n token = soup.find(attrs={'name': 'csrf-token'})\n self._token = token['content']", "def decode_mac_id(self, request, id):\n # There might be multiple secrets in use, if we're in the\n # process of transitioning from one to another. Try each\n # until we find one that works.\n secrets = self._get_token_secrets(request)\n for secret in secrets:\n try:\n data = tokenlib.parse_token(id, secret=secret)\n key = tokenlib.get_token_secret(id, secret=secret)\n break\n except ValueError:\n pass\n else:\n raise ValueError(\"invalid MAC id\")\n return key, data", "def LookupToken(self, dmtoken):\n self.ReadClientStateFile()\n return self._registered_tokens.get(dmtoken, None)", "def try_get_user_id_from_token(token):\n dot_index = token.find('.')\n if (dot_index > 0):\n token_base64 = token[:dot_index]\n \n try:\n token_string = b64decode(token_base64)\n except Base64DecodeError:\n user_id = 0\n else:\n try:\n user_id = int(token_string)\n except ValueError:\n user_id = 0\n else:\n user_id = 0\n \n return user_id", "def parse_token(req):\n auth_string_list = req.headers.get('Authorization').split()\n # Check in correct format i.e. Bearer: 39xds03lda0...\n if len(auth_string_list) == 1:\n raise ValueError('Authorization has invalid format')\n else:\n token = auth_string_list[1]\n data = jwt.decode(token, config.SECRET_KEY, algorithms='HS256')\n return data", "def get_token(self, tenant_name, user_name, password):\n _url = \"http://\" + self.host_ip + \":5000/v2.0/tokens\"\n _headers = {\"content-type\": \"application/json\"}\n _token_info = {\"auth\": {\"tenantName\": tenant_name,\n \"passwordCredentials\":\n {\"username\": user_name,\n \"password\": password}}\n }\n\n _body = json.dumps(_token_info)\n response = self.request(\"POST\", _url, _headers, _body)\n if response is None:\n LOG_OBJ.error(\"No response from Server while getting token for\"\n \" tenant: %s\" % tenant_name)\n return response\n if response.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\"Request of token for %s tenant Failed with\"\n \" status %s \" % (tenant_name, response.status))\n return response.status\n output = json.loads(response.data)\n token_id = output['access']['token']['id']\n LOG_OBJ.debug(\"Token ID for tenant %s is %s\" % (tenant_name, token_id))\n\n return token_id", "def decode(code: bytes):# -> Transaction:\n vals: list[str] = str(bytes).split(' ')\n result: Transaction = Transaction()\n result.id = int(vals[0])\n result.time = int(vals[1])\n result.action = string_to_action(vals[2])\n result.acting_username = vals[3]\n result.source_account_id = int(vals[4])\n result.destination_account_id = int(vals[5])\n result.funds_amount = int(vals[6])\n return result", "def _requestSwiftToken(self):\n oauth_access_token = self.accessTokenManager.token\n c, r = http._get(\n self.auth_package.HUBIC_API+'account/credentials/',\n headers={\n 'Authorization': 'Bearer '+oauth_access_token\n }\n )\n result = json.loads(r.read())\n c.close()\n\n if r.status != 200:\n try:\n err =result\n err['code'] = r.status\n except Exception as e:\n err = {}\n\n raise Exception(\"Unable to get swift token, \"\n \"(%s)\"%str(err))\n\n self._endpoint = result['endpoint']\n self._token = result['token']\n self._expire = datetime.strptime( result['expires'][:-6], \"%Y-%m-%dT%H:%M:%S\" ) - timedelta(seconds=10)", "def decode_token(token):\n text = xlmr.decode(torch.tensor(token).long())\n 
return text.replace(' ', '')", "def deserialize(token):\n\n if token.type == TYPE_BOOLEAN:\n return _to_boolean(token)\n elif token.type == TYPE_INTEGER:\n return _to_int(token)\n elif token.type == TYPE_FLOAT:\n return _to_float(token)\n elif token.type == TYPE_DATE:\n return _to_date(token)\n elif token.type in (TYPE_STRING, TYPE_MULTILINE_STRING, TYPE_BARE_STRING,\n TYPE_LITERAL_STRING, TYPE_MULTILINE_LITERAL_STRING):\n return _to_string(token)\n else:\n raise Exception('This should never happen!')", "def test_get_u2ftoken_by_id(self):\n response = self.client.get_u2ftoken_by_id(\"DU012345678901234567\")\n uri, args = response[\"uri\"].split(\"?\")\n\n self.assertEqual(response[\"method\"], \"GET\")\n self.assertEqual(uri, \"/admin/v1/u2ftokens/DU012345678901234567\")\n self.assertEqual(util.params_to_dict(args), {\"account_id\": [self.client.account_id]})", "def token(cls, token):\n user_db = User.get_by('token', token)\n if not user_db:\n raise ValueError('Sorry, your token is either invalid or expired.')\n return token", "def verify_decode_jwt(token):\n jsonurl = urlopen(f'https://{AUTH0_DOMAIN}/.well-known/jwks.json')\n jwks = json.loads(jsonurl.read())\n\n unverified_header = jwt.get_unverified_header(token)\n\n if 'kid' not in unverified_header:\n raise AuthError({\n 'code': 'invalid_header',\n 'description': 'Header of token must contain key id.'\n }, 401)\n\n rsa_key = {}\n for key in jwks['keys']:\n if key['kid'] == unverified_header['kid']:\n rsa_key = {\n 'kty': key['kty'],\n 'kid': key['kid'],\n 'use': key['use'],\n 'n': key['n'],\n 'e': key['e']\n }\n\n if not rsa_key:\n raise AuthError({\n 'code': 'invalid_header',\n 'description': 'Unable to find appropriate key for token.'\n }, 401)\n\n try:\n payload = jwt.decode(\n token,\n rsa_key,\n algorithms=ALGORITHMS,\n audience=API_AUDIENCE,\n issuer='https://' + AUTH0_DOMAIN + '/'\n )\n return payload\n except jwt.ExpiredSignatureError:\n raise AuthError({\n 'code': 'token_expired',\n 'description': 'Token expired.'\n }, 401)\n except jwt.JWTClaimsError:\n raise AuthError({\n 'code': 'invalid_claims',\n 'description': 'Incorrect claims. 
Please check the audience and issuer.'\n }, 401)\n except Exception:\n raise AuthError({\n 'code': 'invalid_header',\n 'description': 'Unable to parse token.'\n }, 401)", "def _parse_security_token(token):\n if not token:\n return None\n if ':' not in token:\n logging.warn('Malformed token: no signature separator')\n return None\n sig, body = token.split(':', 1)\n if _DISABLE_CRYPTO:\n plaintext = body\n else:\n key_storage = KeyStorage.get()\n hmac_key = key_storage.hmac_key\n if type(hmac_key) == unicode:\n # Crypto requires byte strings\n hmac_key = hmac_key.encode('utf8')\n computed_sig = HMAC.HMAC(key=hmac_key,\n msg=body).hexdigest()\n if sig != computed_sig:\n logging.warn('Malformed token: invalid signature')\n return None\n try:\n plaintext = AES.new(key_storage.aes_key,\n AES.MODE_CBC).decrypt(body)\n except ValueError:\n logging.warn('Malformed token: wrong size')\n return None\n # Remove excess whitespace.\n plaintext = plaintext.strip()\n # The plaintext should contain at least one space.\n if ' ' not in plaintext:\n logging.warn('Malformed token: bad contents')\n return None\n parts = plaintext.split(' ')\n if len(parts) != 2:\n logging.warn('Malformed token: bad structure')\n return None\n timestamp, email = parts\n try:\n timestamp = int(timestamp, 16)\n except ValueError:\n logging.warn('Malformed token: bad timestamp')\n return None\n # Reject tokens that are too old or which have time-traveled. We\n # allow for 1s of clock skew.\n age_s = time.time() - timestamp\n if age_s < -1 or age_s > _TOKEN_TIMEOUT_S:\n logging.warn('Malformed token: expired (age=%ds)', age_s)\n return None\n cred = _Credentials()\n cred.email = email\n cred.security_token_is_stale = (age_s > 0.5 * _TOKEN_TIMEOUT_S)\n return cred", "def _decrypt(self, msg):\r\n # they must be real crypto experts at pubnub.com\r\n # two lines of code and two capital mistakes :-(\r\n # pylint: disable=E1101\r\n key = hashlib.sha256(self.cipher).hexdigest()[0:32]\r\n aes = AES.new(key, AES.MODE_CBC, \"0123456789012345\")\r\n decrypted = aes.decrypt(base64.decodestring(msg))\r\n return json.loads(decrypted[0:-ord(decrypted[-1])])", "def verify_token(token: str, credentials_exception: HTTPException) -> str:\n\n try:\n payload = jwt.decode(token, SECRET_KEY, algorithms=ALGORITHM)\n username: str = payload.get(\"sub\")\n if username is None:\n raise credentials_exception\n except JWTError:\n raise credentials_exception\n return username", "def decode(self, response, request):\n log.debug(\"Decoding authorization.\")\n auth = self._parseAuth(response)\n try:\n self._verifyChallenge(auth[\"challenge\"], request)\n creds = self.buildCredentials(auth, request)\n except KeyError, ke:\n raise LoginFailed(\"{0!r} not in authorization\".format(*ke.args))\n except LoginFailed, lf:\n log.warn(lf)\n raise\n log.debug(\"Decoded credentials: {0}\".format(creds))\n return creds", "def _authenticate(self):\n url = self.endpoint + \"/tokens\"\n h = httplib2.Http()\n response, rawcontent = h.request(\n url, \n method=\"POST\",\n headers={ \"Content-Type\":\"application/json\" },\n body=json.dumps(self.credentials()))\n content = json.loads(rawcontent)\n self.token = content['access']['token']['id']\n #TODO: this needs to convert the ISO8601 string to a timestamp\n self.expiration = content['access']['token']['expires']\n self.catalog = content['access']['serviceCatalog']", "def get_token(self, code):\n\n # live need post a form to get token\n headers = {'Content-type': 'application/x-www-form-urlencoded'}\n data = {\n 'client_id': 
get_config('login.live.client_id'),\n 'client_secret': get_config('login.live.client_secret'),\n 'redirect_uri': get_config('login.live.redirect_uri'),\n 'grant_type': 'authorization_code',\n 'code': code\n }\n # Following is use urllib to post request\n url = get_config('login.live.access_token_url')\n r = requests.post(url, data=data, headers=headers)\n resp = r.json()\n\n if resp.get(\"error\") is not None:\n raise Exception(resp)\n\n return resp[\"access_token\"]", "def get_auth_token(controller_ip=DNAC, username=DNAC_USER, password=DNAC_PASSWORD):\n\n login_url = \"https://{0}:{1}/dna/system/api/v1/auth/token\".format(controller_ip, DNAC_PORT)\n# Change verify to TRUE\n result = requests.post(url=login_url, auth=HTTPBasicAuth(DNAC_USER, DNAC_PASSWORD), verify=True)\n result.raise_for_status()\n\n token = result.json()[\"Token\"]\n# print(resultss)\n tokens = result.json()\n # print(result.headers)\n # print(token) \n # print(tokens)\n return {\n \"controller_ip\": controller_ip,\n \"token\": token\n }", "def __get_authentication_token(self):\n cache = load_json(self._tokenPath)\n return cache[\"authentication_token\"]", "def get_token(token_method, acc=None, vo=None, idt=None, pwd=None):\n if not acc:\n acc = request.environ.get('HTTP_X_RUCIO_ACCOUNT')\n if not vo:\n vo = request.environ.get('HTTP_X_RUCIO_VO')\n if not idt:\n idt = request.environ.get('SSL_CLIENT_S_DN')\n if not (acc and vo and idt):\n return None\n try:\n if pwd:\n token = token_method(acc, idt, pwd, 'webui', request.environ.get('REMOTE_ADDR'), vo=vo).get('token')\n else:\n token = token_method(acc, idt, 'webui', request.environ.get('REMOTE_ADDR'), vo=vo).get('token')\n return token\n except:\n return None", "def from_dict(cls, dikt) -> \"Token\":\n return util.deserialize_model(dikt, cls)", "def verify_jwt(token):\n return jwt.decode(token.encode(), SECRET_KEY)", "def authenticate(token: JWT) -> AuthResponse:\n payload = jwt.decode(token)\n return payload[\"policies\"], payload[\"access_key\"]", "def retrieve_token():\n try:\n deserialized_message = json.loads(peek_app_token())\n\n expires_at = deserialized_message.get('expires_at')\n # Token is good, return it\n if expires_at and check_expired_time(expires_at):\n return deserialized_message.get('token')\n else: # Token expired, refresh it\n refresh_token()\n\n deserialized_message = peek_app_token()\n expires_at = deserialized_message.get('expires_at')\n # Token is good, return it\n try:\n assert(expires_at and check_expired_time(expires_at))\n return deserialized_message.get('token')\n except:\n raise # When all else fails\n\n except Exception as exc:\n log.error(f'Could not refresh token.\\n{exc}')\n traceback.print_exc(file=sys.stderr)\n\n return None", "def check_token(token: str, secret: str | List[str], max_age_seconds: int = 60 * 60 * 24) -> Any:\n return URLSafeTimedSerializer(secret).loads(token, max_age=max_age_seconds, salt=\"token\")", "def exchange_token(self, code):\n access_token_url = OAUTH_ROOT + '/access_token'\n params = {\n 'client_id': self.client_id,\n 'client_secret': self.client_secret,\n 'redirect_uri': self.redirect_uri,\n 'code': code,\n }\n resp = requests.get(access_token_url, params=params)\n if not resp.ok:\n raise MixcloudOauthError(\"Could not get access token.\")\n return resp.json()['access_token']", "def decode(self, data):\n return self.__cipher.decrypt(data)", "def parse_id_token(remote, id_token, claims_options,\n access_token=None, nonce=None):\n\n def load_key(header, payload):\n jwk_set = remote.fetch_jwk_set()\n try:\n 
return jwk.loads(jwk_set, header.get('kid'))\n except ValueError:\n jwk_set = remote.fetch_jwk_set(force=True)\n return jwk.loads(jwk_set, header.get('kid'))\n\n claims_params = dict(\n nonce=nonce,\n client_id=remote.client_id,\n )\n if access_token:\n claims_params['access_token'] = access_token\n claims_cls = CodeIDToken\n else:\n claims_cls = ImplicitIDToken\n claims = jwt.decode(\n id_token, key=load_key,\n claims_cls=claims_cls,\n claims_options=claims_options,\n claims_params=claims_params,\n )\n claims.validate(leeway=120)\n return UserInfo(claims)" ]
[ "0.7157648", "0.7092701", "0.70881104", "0.6925482", "0.6889764", "0.6852807", "0.6839684", "0.673713", "0.6725172", "0.6700684", "0.6668323", "0.6648689", "0.65112424", "0.6478294", "0.6444482", "0.6437185", "0.6422741", "0.64142865", "0.63824797", "0.63795084", "0.63528466", "0.6265342", "0.6196293", "0.6170813", "0.61567587", "0.61329365", "0.6048655", "0.60434705", "0.6041583", "0.60138494", "0.59301895", "0.5799098", "0.5767651", "0.5741722", "0.57225364", "0.5716671", "0.57018256", "0.563694", "0.5612611", "0.56063765", "0.5587727", "0.5582337", "0.5560802", "0.5480658", "0.54748875", "0.5458981", "0.5449612", "0.5448233", "0.541744", "0.54171765", "0.53897953", "0.5372374", "0.5330212", "0.53201634", "0.53185946", "0.5313654", "0.53000665", "0.5276954", "0.5267549", "0.52556455", "0.5247402", "0.524379", "0.52407944", "0.52256316", "0.52155524", "0.52112716", "0.5173155", "0.51618576", "0.5161736", "0.5159382", "0.5138791", "0.5124078", "0.51226306", "0.51201326", "0.5106527", "0.5103098", "0.50979596", "0.5084975", "0.50780916", "0.5076318", "0.505364", "0.5042552", "0.5037977", "0.5014154", "0.50103325", "0.5001077", "0.4985299", "0.49744362", "0.49626762", "0.49543512", "0.4952769", "0.49500066", "0.4947153", "0.49342906", "0.49320358", "0.49302167", "0.4928361", "0.49203348", "0.49195898", "0.49169123" ]
0.74529535
0
This function is to stop the spider
def spider_idle(self):
    self.logger.info('the queue is empty, wait 30 seconds before closing the spider')
    time.sleep(30)
    req = self.next_requests()
    if req:
        self.schedule_next_requests()
    else:
        self.crawler.engine.close_spider(self, reason='finished')
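For context: spider_idle is not called automatically; in the usual scrapy-redis pattern it is connected to Scrapy's spider_idle signal. Below is a minimal sketch of that wiring, assuming a standard Scrapy project; the class name IdleAwareSpider is illustrative and not part of the document above. Unlike the document's version, which sleeps and then closes the engine, this variant raises DontCloseSpider to keep the spider alive whenever the queue is empty.

import scrapy
from scrapy import signals
from scrapy.exceptions import DontCloseSpider

class IdleAwareSpider(scrapy.Spider):
    name = "idle_aware"  # illustrative name, not from the original source

    @classmethod
    def from_crawler(cls, crawler, *args, **kwargs):
        spider = super().from_crawler(crawler, *args, **kwargs)
        # Scrapy fires spider_idle when there are no pending requests;
        # connect our handler so we decide whether the spider may close.
        crawler.signals.connect(spider.spider_idle, signal=signals.spider_idle)
        return spider

    def spider_idle(self):
        # Raising DontCloseSpider keeps the engine alive, so requests
        # pushed into the queue later can still be scheduled.
        raise DontCloseSpider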
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def spider_closing(spider):\n print(\"Spiderclose\"*10)\n #reactor.stop()", "def stop(self):", "def stop(self):", "def _stop(self):", "def stop(self) -> None:\n ...", "def stop(self) -> None:", "def stop(self) -> None:", "def stop(self):\r\n pass", "def stop(self):\n\t\tpass", "def Stop(self) :\n\t\t...", "def stop():", "def stop():", "def stop():", "def stop():", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop(self):\n pass", "def stop (self):\n pass", "def stop (self):\n pass", "def stop() -> None:", "def stop(self):\n return", "def stop(self):\n super().stop()", "def stop(self) -> None:\n pass", "def spider_closing(spider):\n logger.info(\"Spider closed: %s\" % spider)\n if True:\n reactor.stop()", "def spider_closing(spider):\n log.msg(\"Closing reactor\", level=log.INFO)\n reactor.stop()", "def stop_traffic(self):\n self._logger.debug(\"stop_traffic()\")", "def stop(self):\n # All done!\n super().stop()", "def stop(self) :\n raise NotImplementedError(\"stop not implemented\")", "def stop(self):\n raise NotImplementedError", "def stop(self):\n raise NotImplementedError", "def stop(self):\n self._stop_flag = True", "def _stop(self):\n return True", "def stop(self) -> None:\n raise NotImplementedError(\"Base method not implemented\")", "def stop(self):\r\n raise NotImplementedError('method stop() is not implemented')", "async def _stop(self):\n return", "def stop(self):\n self.api.stop()", "async def stop(self):", "def stop(self):\n self.scion_sh('stop')", "def stop(self):\n raise NotImplementedError()", "def stop(self):\n raise NotImplementedError()", "def Stop(self):\n\t\tpayload = { \"Arg1\": self.href }\n\t\treturn self._execute('stop', payload=payload, response_object=None)", "def stop(self) -> None:\n raise NotImplementedError()", "def stop(self) -> None:\n raise NotImplementedError()", "def stopDetection(self):\n self.statusWrite(\"stop\")\n self.p.sleep()\n self.birdHere = 0", "def stop(self):\r\n self.running = False", "def stop(self):\r\n self.running = False", "def stop(self):\r\n self.terminating = True", "def do_stop(self,line):\n print \"Trying to stop the robot\"\n self.robot.tank(0,0)", "def _stop_bot(_event):\n pass", "def stop(self):\n self.halt = True", "def force_stop(self):\n #cancel any current request:\n self._cancel_current_request()", "def Stop(self, *_):\n self.Log('Stopping...')\n self._stop = True", "def spider_closing(spider):\n print(\"Spiderclose\"*10)\n #import sys #here as well, we can see both path on terminal added to sys.path ,\n #we added both in track.views.it will remain untill program terminated.\n #print(sys.path)\n #reactor.stop()", "def _stop(self):\n self._pi.stop()", "def stop(self, **kwargs):\n self.turn_off()", "def stop(self):\n self.running = False", "def stop(self):\n self.running = False", "def stop(self):\n self.running = False", "def stop(self):\n self.running = False", "def stop(self):\n self.running = False", "def post_stop(self):", "def stop(self):\n self._running = False\n self._logger.info(\"Locator finished main loop\")", "def stop(self):\n self.stopping = True\n self.queue_response(exc=ClientError())", "def stop(self):\n self.stopped = True", "def stop(self):\r\n self.stopped = True", "def stop(self):\n # print \"process shutdown complete\"", "def stop(self):\n self._run = False", "def stop(self):\n if 
self.debug:\n print(\"%s stop\" % self.name)\n self.force_exit()", "def stop(self):\n self._should_run = False", "def stop(self):\n self.on_stop()", "def stop(self):\n\n self.keep_running = False", "def foreceStop(self):\n self.__success = False\n self.stop()", "def stop(self):\n self._stop_signal = True", "def on_stop(self):\n self.write_log(\"策略停止\")", "def on_stop(self):\n self.write_log(\"策略停止\")", "def on_stop(self):\n self.write_log(\"策略停止\")", "def on_stop(self):\n self.write_log(\"策略停止\")", "def _gracefully_stop(self):\n pass", "def _stop(self, host):\n pass", "def _stop(self):\n self.display_end_message()", "def stop(self):\n self.killed = True", "def stop(self):\n self.send_stop()\n self.join()", "def stop(self):\n self.stopped = True", "def stop(self):\n self.stopped = True", "def need_stop(self, path):", "def stop(self):\n\n self.stop_thread = True", "def stop(self):\n\n self.active = False", "def stop(self):\n self.__publish_cmd(0.0, 0.0)\n\n return", "def stop(self):\n self.finished = True", "def ShutDown(self):\n self.stop = True", "def __exit__(self, *args):\n self.stop()", "def stop(self):\r\n self.stopped = True\r\n time.sleep(1)", "def stop(self):\n self.active = False" ]
[ "0.7635541", "0.75889057", "0.75889057", "0.74974936", "0.7481087", "0.7463915", "0.7463915", "0.7407182", "0.74046713", "0.7344371", "0.73300225", "0.73300225", "0.73300225", "0.73300225", "0.73190135", "0.73190135", "0.73190135", "0.73190135", "0.73190135", "0.73190135", "0.73190135", "0.73190135", "0.73190135", "0.73190135", "0.73190135", "0.7299476", "0.7299476", "0.7297525", "0.7269081", "0.7173329", "0.70821154", "0.69558483", "0.6914082", "0.68903416", "0.6881402", "0.6866882", "0.68611246", "0.68611246", "0.68503577", "0.6846156", "0.68135816", "0.68134516", "0.67844236", "0.6774234", "0.67496103", "0.6738854", "0.6733275", "0.6733275", "0.6731472", "0.6727895", "0.6727895", "0.6726416", "0.67241263", "0.67241263", "0.67190546", "0.67130405", "0.67116237", "0.6709678", "0.67075914", "0.66930723", "0.66920626", "0.66812885", "0.6678907", "0.6667638", "0.6667638", "0.6667638", "0.6667638", "0.6667638", "0.6655855", "0.6654852", "0.66469383", "0.6645545", "0.6636084", "0.6633369", "0.66302633", "0.66153234", "0.66113055", "0.661026", "0.66081905", "0.6586761", "0.65860254", "0.6585593", "0.6585593", "0.6585593", "0.6585593", "0.6576159", "0.65750575", "0.6565378", "0.6560033", "0.6552868", "0.6549372", "0.6549372", "0.6537396", "0.6534938", "0.65346086", "0.652126", "0.6516728", "0.6498294", "0.64955413", "0.649441", "0.64844525" ]
0.0
-1
Q to SSH. This code solves a linear system of equations using the Conjugate Gradient method
def pv2ssh(lon, lat, q, hg, c, nitr=1, name_grd=''):

    def compute_avec(vec, aaa, bbb, grd):
        avec = np.empty(grd.np0,)
        avec[grd.vp2] = aaa[grd.vp2]*((vec[grd.vp2e]+vec[grd.vp2w]-2*vec[grd.vp2])/(grd.dx1d[grd.vp2]**2)+(vec[grd.vp2n]+vec[grd.vp2s]-2*vec[grd.vp2])/(grd.dy1d[grd.vp2]**2)) + bbb[grd.vp2]*vec[grd.vp2]
        avec[grd.vp1] = vec[grd.vp1]
        return avec,

    if name_grd is not None:
        if os.path.isfile(name_grd):
            with open(name_grd, 'rb') as f:
                grd = pickle.load(f)
        else:
            grd = Grid(lon, lat)
            with open(name_grd, 'wb') as f:
                pickle.dump(grd, f)
            f.close()
    else:
        grd = Grid(lon, lat)

    ny, nx, = np.shape(hg)
    g = grd.g
    x = hg[grd.indi, grd.indj]
    q1d = q[grd.indi, grd.indj]
    aaa = g/grd.f01d
    bbb = -g*grd.f01d/c**2
    ccc = +q1d
    aaa[grd.vp1] = 0
    bbb[grd.vp1] = 1
    ccc[grd.vp1] = x[grd.vp1]  # boundary condition

    vec = +x
    avec, = compute_avec(vec, aaa, bbb, grd)
    gg = avec - ccc
    p = -gg

    for itr in range(nitr-1):
        vec = +p
        avec, = compute_avec(vec, aaa, bbb, grd)
        tmp = np.dot(p, avec)
        if tmp != 0.:
            s = -np.dot(p, gg)/tmp
        else:
            s = 1.
        a1 = np.dot(gg, gg)
        x = x + s*p
        vec = +x
        avec, = compute_avec(vec, aaa, bbb, grd)
        gg = avec - ccc
        a2 = np.dot(gg, gg)
        if a1 != 0:
            beta = a2/a1
        else:
            beta = 1.
        p = -gg + beta*p

    vec = +p
    avec, = compute_avec(vec, aaa, bbb, grd)
    val1 = -np.dot(p, gg)
    val2 = np.dot(p, avec)
    if val2 == 0.:
        s = 1.
    else:
        s = val1/val2
    a1 = np.dot(gg, gg)
    x = x + s*p

    # back to 2D
    h = np.empty((ny, nx))
    h[:, :] = np.NAN
    h[grd.indi, grd.indj] = x[:]
    return h
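The routine above is a matrix-free conjugate-gradient inversion: compute_avec plays the role of the matrix-vector product A v, gg holds the residual, p the search direction, s the step size, and beta the direction-update coefficient. Below is a minimal, self-contained sketch of the same iteration applied to an explicit symmetric positive-definite matrix; the names cg_sketch, A, b, and x0 are illustrative and not part of pv2ssh.

import numpy as np

def cg_sketch(A, b, x0, n_iter=50, tol=1e-10):
    """Solve A x = b by conjugate gradient, assuming A is symmetric positive-definite."""
    x = x0.copy()
    r = b - A @ x          # residual (pv2ssh keeps it negated as gg)
    p = r.copy()           # first search direction is the residual
    rs_old = r @ r
    for _ in range(n_iter):
        Ap = A @ p
        alpha = rs_old / (p @ Ap)      # step size along p
        x += alpha * p
        r -= alpha * Ap
        rs_new = r @ r
        if np.sqrt(rs_new) < tol:      # converged
            break
        p = r + (rs_new / rs_old) * p  # conjugate direction update
        rs_old = rs_new
    return x

# Usage on a small SPD system:
A = np.array([[4., 1.], [1., 3.]])
b = np.array([1., 2.])
x = cg_sketch(A, b, np.zeros(2))  # x approximately solves A x = b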
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def preCondConjugateGradientSolver(b, x, linsys_setup, eps, i_max, plotInterval, mapDir):\n datamaps, ninvs, beams, freqs, power_2d, precond_2d, clumaps, g_nu, \\\n map_prop = linsys_setup\n nx, ny, pixScaleX, pixScaleY = map_prop\n nCluster = len(clumaps[0])\n ksz = False\n if len(clumaps)==2: ksz=True\n \n \n # Calculate residual r = b - (A^-1) x\n r = b - applyMat(x, linsys_setup)\n d = r\n\n\n delta_new = numpy.inner(r,r)\n \n\n\n\n delta_o = delta_new\n delta_array = numpy.zeros(shape=(i_max))\n \n # Iterate CG solver until converged\n i = 0\n #i_max = 300\n while (i < i_max) and (delta_new > delta_o*eps**2.):\n if i==0: t = time.time()\n \n if i%plotInterval == 0 and i != 0:\n print \"\\tNumber of iterations in the CG:\", i\n x0 = x[:nx*ny] # CMB\n x1 = x[nx*ny:nx*ny+1] # Monopole\n x2 = x[nx*ny+1:nx*ny+1+nCluster] # TSZ\n if ksz: x3 = x[nx*ny+1+nCluster:nx*ny+1+2*nCluster]\n print \"\\tMonopole:\", x1\n print \"\\tTSZ:\", x2\n if ksz: print \"\\tKSZ:\", x3\n \n x0.shape = (ny,nx)\n a_l = numpy.fft.fft2(x0)\n a_l *= precond_2d\n x_test = numpy.real(numpy.fft.ifft2(a_l))\n plot(x_test,mapDir+'/CMB_%d.png'%i,'Reconstructed CMB', range=(-250., 250.))\n print delta_new, delta_o*eps**2.\n\n q = applyMat(d, linsys_setup)\n alpha = delta_new / (numpy.inner(d,q))\n x += alpha * d\n\n # What does this do? It's always false.\n if i/50. < numpy.int(i/50):\n r = b - applyMat(x, linsys_setup)\n else:\n r = r - alpha*q\n \n delta_old = delta_new\n delta_new = numpy.inner(r,r)\n beta = delta_new/delta_old\n d = r + beta * d\n #if i==0: print \"\\tEach iteration takes:\", time.time()-t\n i += 1\n\n x0 = x[:nx*ny].reshape((ny, nx))\n x1 = x[nx*ny:nx*ny+1]\n x2 = x[nx*ny+1:nx*ny+1+nCluster]\n if ksz:\n x3 = x[nx*ny+1+nCluster:nx*ny+1+2*nCluster]\n else:\n x3 = None\n \n a_l = numpy.fft.fft2(x0) * precond_2d\n x0 = numpy.real(numpy.fft.ifft2(a_l))\n\n \n # CMB, monopole, TSZ, KSZ\n return x0, x1, x2, x3", "def conjugate_gradient(self, batch_inputs, batch_unflattened_labels, batch_feature_sequence_lens, batch_size, num_epochs, model = None, damping_factor = 0.0, #seems to be correct, compare with conjugate_gradient.py\n verbose = False, preconditioner = None, gradient = None, second_order_type='gauss-newton', \n init_search_direction = None, structural_damping_const = 0.0):\n if verbose:\n print \"preconditioner is\", preconditioner\n excluded_keys = {'bias':['0'], 'weights':[]} \n if model == None:\n model = self.model\n \n tolerance = 5E-4\n gap_ratio = 0.1\n min_gap = 10\n #max_test_gap = int(np.max([np.ceil(gap_ratio * num_epochs), min_gap]) + 1)\n model_vals = list()\n \n model_update = RNNLM_Weight()\n model_update.init_zero_weights(model.get_architecture())\n \n outputs, hiddens = self.forward_pass(batch_inputs, model, return_hiddens=True)\n if gradient == None:\n gradient = self.calculate_gradient(batch_inputs, batch_unflattened_labels, batch_feature_sequence_lens, batch_size, model = model, hiddens = hiddens, outputs = outputs)\n \n if init_search_direction == None:\n model_vals.append(0)\n residual = gradient \n else:\n second_order_direction = self.calculate_second_order_direction(batch_inputs, batch_unflattened_labels, batch_feature_sequence_lens, batch_size, init_search_direction, \n model, second_order_type=second_order_type, hiddens = hiddens,\n structural_damping_const = structural_damping_const * damping_factor)\n residual = gradient + second_order_direction\n model_val = 0.5 * init_search_direction.dot(gradient + residual, excluded_keys)\n model_vals.append(model_val) \n 
model_update += init_search_direction \n \n if verbose:\n print \"model val at end of epoch is\", model_vals[-1]\n \n if preconditioner != None:\n preconditioned_residual = residual / preconditioner\n else:\n preconditioned_residual = residual\n search_direction = -preconditioned_residual\n residual_dot = residual.dot(preconditioned_residual, excluded_keys)\n for epoch in range(num_epochs):\n# print \"\\r \\r\", #clear line\n# sys.stdout.write(\"\\rconjugate gradient epoch %d of %d\\r\" % (epoch+1, num_epochs)), sys.stdout.flush()\n \n if damping_factor > 0.0:\n #TODO: check to see if ... + search_direction * damping_factor is correct with structural damping\n second_order_direction = self.calculate_second_order_direction(batch_inputs, batch_unflattened_labels, batch_feature_sequence_lens, batch_size, search_direction, model, second_order_type=second_order_type, hiddens = hiddens, \n structural_damping_const = damping_factor * structural_damping_const) + search_direction * damping_factor\n else:\n second_order_direction = self.calculate_second_order_direction(batch_inputs, batch_unflattened_labels, batch_feature_sequence_lens, batch_size, search_direction, model, second_order_type=second_order_type, hiddens = hiddens)\n \n curvature = search_direction.dot(second_order_direction,excluded_keys)\n if curvature <= 0:\n print \"curvature must be positive, but is instead\", curvature, \"returning current weights\"\n break\n \n step_size = residual_dot / curvature\n if verbose:\n print \"residual dot search direction is\", residual.dot(search_direction, excluded_keys)\n print \"residual dot is\", residual_dot\n print \"curvature is\", curvature\n print \"step size is\", step_size\n model_update += search_direction * step_size\n \n residual += second_order_direction * step_size\n model_val = 0.5 * model_update.dot(gradient + residual, excluded_keys)\n model_vals.append(model_val)\n if verbose:\n print \"model val at end of epoch is\", model_vals[-1]\n test_gap = int(np.max([np.ceil(epoch * gap_ratio), min_gap]))\n if epoch > test_gap: #checking termination condition\n previous_model_val = model_vals[-test_gap]\n if (previous_model_val - model_val) / model_val <= tolerance * test_gap and previous_model_val < 0:\n print \"\\r \\r\", #clear line\n sys.stdout.write(\"\\rtermination condition satisfied for conjugate gradient, returning step\\r\"), sys.stdout.flush()\n break\n if preconditioner != None:\n preconditioned_residual = residual / preconditioner\n else:\n preconditioned_residual = residual\n new_residual_dot = residual.dot(preconditioned_residual, excluded_keys)\n conjugate_gradient_const = new_residual_dot / residual_dot\n search_direction = -preconditioned_residual + search_direction * conjugate_gradient_const\n residual_dot = new_residual_dot\n return model_update, model_vals", "def Conjugate_Gradient(A, b, x0, max_iterations=200, epsilon=1e-2):\n\n last_x = x0\n last_r = b - A @ x0\n last_p = last_r\n curr_iter = 0\n residual_queue = []\n convergences_queue = []\n while curr_iter < max_iterations:\n Ap = A @ last_p\n alpha = (last_r.transpose() @ last_r) / (last_p.transpose() @ Ap)\n curr_x = last_x + alpha * last_p\n curr_r = last_r - alpha * Ap\n c = np.linalg.norm(A @ curr_x - b, 2) / np.linalg.norm(b, 2)\n convergences_queue.append(np.linalg.norm(A @ curr_x - b, 2) / np.linalg.norm(A @ last_x - b, 2))\n residual_queue.append(np.linalg.norm(A @ curr_x - b, 2))\n if c < epsilon:\n print_graph(residual_queue, curr_iter, \"residual\", \"Conjugate Gradient\")\n 
print_graph(convergences_queue, curr_iter, \"convergence rate\", \"Conjugate Gradient\")\n print(\"Number of Iterations: \" + str(curr_iter))\n return curr_x\n beta = (curr_r.transpose() @ curr_r) / (last_r.transpose() @ last_r)\n last_p = curr_r + beta * last_p\n last_x = curr_x\n last_r = curr_r\n curr_iter += 1\n return \"failed\"", "def clebsch_gordan((J1,M1),(J2,M2),(J3,M3)):\n cg=(-1)**(J2-J1-M3)*math.sqrt(2*J3+1)*pygsl.sf.coupling_3j(int(2*J1), int(2*J2), int(2*J3), int(2*M1), int(2*M2),int(-2*M3))[0]\n #\n return cg", "def reaction_forces(Ca, la, x1, x2, x3, xa, h, d1, d3, theta, P, q, E, I):\r\n \r\n equation_matrix = np.array([[0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], \r\n [1, 0, 0, 1, 0, 1, 0, np.sin(theta), 0, 0, 0, 0, (P*np.sin(theta)+q*la*np.cos(theta))], \r\n [0, 1, 0, 0, 1, 0, 1, np.cos(theta), 0, 0, 0, 0, (P*np.cos(theta)-q*la*np.sin(theta))],\r\n \r\n [-(Ca/4-h/2), 0, 0, -(Ca/4-h/2) ,0 , -(Ca/4-h/2), 0, (np.cos(theta)*h/2-np.sin(theta)*Ca/4), 0, 0, 0, 0, (P*np.cos(theta)*h/2*-P*np.sin(theta)*Ca/4)], \r\n [0, (x2-x1), 0, 0, 0, 0, -(x3-x2), (np.cos(theta)*xa/2), 0, 0, 0, 0, (-P*np.cos(theta)*xa/2+q*la*np.sin(theta)*(la/2-x2))], \r\n [-(x2-x1), 0, 0, 0, 0, (x3-x2), 0, -np.sin(theta)*xa/2, 0, 0, 0, 0, (P*np.sin(theta)*xa/2+q*la*np.cos(theta)*(la/2-x2))], \r\n \r\n [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, x1, 1, -q*np.sin(theta)*((x1**4)/24)], \r\n [0, ((x2-x1)**3)/6, 0, 0, 0, 0, 0, ((np.cos(theta))*((xa/2)**3)/6), 0, 0, x2, 1, (-q*np.sin(theta)*((x2**4)/24))], \r\n [0, ((x3-x1)**3)/6, 0, 0, ((x3-x2)**3)/6, 0, 0, ((np.cos(theta))*((x3-x2+xa/2)**3)/6), 0, 0, x3, 1, (-q*np.sin(theta)*((x3**4)/24)+P*(np.cos(theta))*(x3-x2-xa/2)**3/6)], \r\n [0, 0, 0, 0, 0, 0, 0, 0, x1, 1, 0, 0, (-E*I*d1*+q*np.cos(theta)*(x1**4)/24)], \r\n [(((x2-x1)**3)/6), 0, 0, 0, 0, 0, 0, ((-np.sin(theta))*((xa/2)**3)/6), x2, 1, 0, 0, (q*np.cos(theta)*(x2**4)/24)], \r\n [(((x3-x1)**3)/6),0,0,(((x3-x2)**3)/6),0,0,0,((-np.sin(theta))*((x3-x2+xa/2)**3)/6),x3,1,0,0,(-E*I*d3*+q*np.cos(theta)*((x3**4)/24)+P/6*np.sin(theta)*(x3-x2-xa/2)**3)]])\r\n \r\n \r\n unknown_matrix = equation_matrix[:,:-1]\r\n constant_matrix = equation_matrix[:,-1]\r\n \r\n \r\n solution_matrix = np.linalg.solve(unknown_matrix,constant_matrix)\r\n \r\n solution_matrix = solution_matrix/1000\r\n \r\n (R1y, R1z, R2x, R2y, R2z, R3y, R3z, RI, c1, c2, c3, c4) = tuple(solution_matrix)\r\n \r\n print((R1y, R1z, R2x, R2y, R2z, R3y, R3z, RI, c1, c2, c3, c4))", "def conjugate(self, ???):", "def conjugate(self):\n pass", "def __calc_jacobian_matrix(self):\n\n tf_matrix_first_to_last = self.tf_matrices_list[-1]\n self.jacobian_matrix = [diff(tf_matrix_first_to_last[:3, -1], self.q[i]).reshape(1, 3) for i in range(len(self.q))]\n self.jacobian_matrix = Matrix(self.jacobian_matrix).T # .T returns the transpose of matrix.", "def incompatibility_solve_cg(self, useAMS=True):\n \n zero = Expression((\"0.0\", \"0.0\", \"0.0\"), degree=1)\n bc = DirichletBC(self.PN, zero, DirichletBoundary())\n \n T1 = Function(self.PN) # Solution for the curl curl problem\n T2 = Function(self.PN) # Solution for the curl curl problem\n T3 = Function(self.PN) # Solution for the curl curl problem\n\n if useAMS:\n \n # Set operator for the linear solver\n L_X = inner(self.strain_diff_1, curl(self.inc_v0))*dx\n A_X, b_X = assemble_system(self.a_X, L_X, bc)\n self.ksp_X.setOperators(as_backend_type(A_X).mat())\n self.ksp_X.solve(as_backend_type(b_X).vec(), as_backend_type(T1.vector()).vec())\n\n # Show linear solver details\n self.ksp_X.view()\n\n # Solve 2nd system\n L_X = 
inner(self.strain_diff_2, curl(self.inc_v0))*dx\n A_X, b_X = assemble_system(self.a_X, L_X, bc)\n self.ksp_X.setOperators(as_backend_type(A_X).mat())\n self.ksp_X.solve(as_backend_type(b_X).vec(), as_backend_type(T2.vector()).vec())\n\n # Solve 3nd system\n L_X = inner(self.strain_diff_3, curl(self.inc_v0))*dx\n A_X, b_X= assemble_system(self.a_X, L_X, bc)\n self.ksp_X.setOperators(as_backend_type(A_X).mat())\n self.ksp_X.solve(as_backend_type(b_X).vec(), as_backend_type(T3.vector()).vec())\n \n else:\n\n ### vanilla CG works with potential as RHS\n\n L_X = inner(self.strain_diff_1, curl(self.inc_v0))*dx\n solve(self.a_X == L_X, T1, bc, \n solver_parameters={'linear_solver': 'cg', 'preconditioner': 'jacobi'}) \n\n L_X = inner(self.strain_diff_2, curl(self.inc_v0))*dx\n solve(self.a_X == L_X, T2, bc, \n solver_parameters={'linear_solver': 'cg', 'preconditioner': 'jacobi'}) \n\n L_X = inner(self.strain_diff_3, curl(self.inc_v0))*dx\n solve(self.a_X == L_X, T3, bc, \n solver_parameters={'linear_solver': 'cg', 'preconditioner': 'jacobi'})\n\n return project( self.X_0(curl(T1),curl(T2),curl(T3)), \n self.TFS, solver_type=\"cg\", preconditioner_type=\"ilu\")", "def ConjugateGradient(Pos, dx, EFracTolLS, EFracTolCG, M, L, Cut):\n PE, Forces = mdlib.calcenergyforces(Pos, M, L, Cut, np.zeros_like(Pos))\n Dir = Forces\n OldPE = 1.e300\n while abs(PE - OldPE) > EFracTolCG * abs(PE):\n OldPE = PE\n PE, Pos = LineSearch(Pos, Dir, dx, EFracTolLS, M, L, Cut)\n OldForces = Forces.copy()\n PE, Forces = mdlib.calcenergyforces(Pos, M, L, Cut, Forces)\n Gamma = np.sum((Forces - OldForces) * Forces) / np.sum(OldForces * OldForces)\n Dir = Forces + Gamma * Dir\n return PE, Pos", "def conjugate(self, *args, **kwargs): # real signature unknown\n pass", "def conjugate(self, *args, **kwargs): # real signature unknown\n pass", "def conjugate(self, *args, **kwargs): # real signature unknown\n pass", "def conjugate(self, *args, **kwargs): # real signature unknown\n pass", "def conjugate(self, *args, **kwargs): # real signature unknown\n pass", "def conjugate(self, *args, **kwargs): # real signature unknown\n pass", "def conjugate(self, *args, **kwargs): # real signature unknown\n pass", "def conjugate(self, *args, **kwargs): # real signature unknown\n pass", "def conjugate(self, *args, **kwargs): # real signature unknown\n pass", "def conjugate(self, *args, **kwargs): # real signature unknown\n pass", "def conjugate(self, *args, **kwargs): # real signature unknown\n pass", "def conjugate(self, *args, **kwargs): # real signature unknown\n pass", "def conjugate(self, *args, **kwargs): # real signature unknown\n pass", "def conjugate(self, *args, **kwargs): # real signature unknown\n pass", "def conjugate(self, *args, **kwargs): # real signature unknown\n pass", "def conjugate(self, *args, **kwargs): # real signature unknown\n pass", "def solve_cg_eps(self, params, r, sz, energy, aux):\n\n dp0_i, g2_i, itr, nsamples = aux\n\n jac = self.vmap_getder(params, r, sz, nsamples)\n #jac_T = conj_transpose(jac)\n\n f_i = jnp.real(-2 * psum(jnp.matmul(energy, jnp.conjugate(jac)), axis_name='p') / nsamples)\n g2_i = self.beta * g2_i + (1. - self.beta) * f_i**2\n g2h_i = jnp.sqrt(g2_i / (1. 
- self.beta**itr))\n\n# cg_mult = lambda v_i: jnp.real(psum(jnp.matmul(jnp.matmul(v_i, jac_T), jac), axis_name='p') / nsamples) + self.eps * (0.001 + g2h_i) * v_i\n cg_mult = lambda v_i: jnp.real(psum(jnp.matmul(jnp.matmul(jnp.conjugate(jac), v_i), jac), axis_name='p') / nsamples) + self.eps * (0.001 + g2h_i) * v_i\n\n dp_i, info = jax.scipy.sparse.linalg.cg(cg_mult, f_i, x0=dp0_i, tol=1e-5, atol=0.0, maxiter=200)\n return dp_i, g2_i", "def weight_update_conjugate_gradient(self, network):\n # compute beta: Fletcher-Reeves\n num = 0.0\n for l, layer in enumerate(network.layers):\n num += np.sum(self.dc_db[l] ** 2)\n num += np.sum(self.dc_dq[l] ** 2)\n num += np.sum(self.dc_drx_inp[l] ** 2)\n num += np.sum(self.dc_dry_inp[l] ** 2)\n num += np.sum(self.dc_drx_pos_out[l] ** 2)\n num += np.sum(self.dc_dry_pos_out[l] ** 2)\n num += np.sum(self.dc_drx_neg_out[l] ** 2)\n num += np.sum(self.dc_dry_neg_out[l] ** 2)\n\n # Initialize velocities to zero for momentum\n if self.vel_b is None or self.vel_q is None:\n self.ms_b = []\n self.ms_q = []\n self.ms_rx_inp = []\n self.ms_ry_inp = []\n self.ms_rx_pos_out = []\n self.ms_ry_pos_out = []\n self.ms_rx_neg_out = []\n self.ms_ry_neg_out = []\n for l, layer in enumerate(network.layers):\n self.ms_b.append(np.zeros(layer.b.shape))\n self.ms_q.append(np.zeros(layer.q.shape))\n self.ms_rx_inp.append(np.zeros(layer.input_size))\n self.ms_ry_inp.append(np.zeros(layer.input_size))\n self.ms_rx_pos_out.append(np.zeros(layer.output_size))\n self.ms_ry_pos_out.append(np.zeros(layer.output_size))\n self.ms_rx_neg_out.append(np.zeros(layer.output_size))\n self.ms_ry_neg_out.append(np.zeros(layer.output_size))\n\n # Take steepest descent step\n for l, layer in enumerate(network.layers):\n layer.b -= self.alpha * self.dc_db[l]\n layer.q -= self.alpha * self.dc_dq[l]\n layer.rx_inp -= self.alpha * self.dc_drx_inp[l]\n layer.ry_inp -= self.alpha * self.dc_dry_inp[l]\n layer.rx_pos_out -= self.alpha * self.dc_drx_pos_out[l]\n layer.ry_pos_out -= self.alpha * self.dc_dry_pos_out[l]\n layer.rx_neg_out -= self.alpha * self.dc_drx_neg_out[l]\n layer.ry_neg_out -= self.alpha * self.dc_dry_neg_out[l]\n\n else:\n # compute beta\n beta = num / self.denominator\n\n # compute s_n\n for l, layer in enumerate(network.layers):\n self.ms_b[l] = -self.alpha * self.dc_db[l] + beta * self.ms_b[l]\n self.ms_q[l] = -self.alpha * self.dc_dq[l] + beta * self.ms_q[l]\n self.ms_rx_inp[l] = -self.alpha * self.dc_drx_inp[l] + beta * self.ms_rx_inp[l]\n self.ms_ry_inp[l] = -self.alpha * self.dc_dry_inp[l] + beta * self.ms_ry_inp[l]\n self.ms_rx_pos_out[l] = -self.alpha * self.dc_drx_pos_out[l] + beta * self.ms_rx_pos_out[l]\n self.ms_ry_pos_out[l] = -self.alpha * self.dc_dry_pos_out[l] + beta * self.ms_ry_pos_out[l]\n self.ms_rx_neg_out[l] = -self.alpha * self.dc_drx_neg_out[l] + beta * self.ms_rx_neg_out[l]\n self.ms_ry_neg_out[l] = -self.alpha * self.dc_dry_neg_out[l] + beta * self.ms_ry_neg_out[l]\n\n # Take step\n for l, layer in enumerate(network.layers):\n layer.b += self.alpha * self.ms_b[l]\n layer.q += self.alpha * self.ms_q[l]\n layer.rx_inp += self.alpha * self.ms_rx_inp[l]\n layer.ry_inp += self.alpha * self.ms_ry_inp[l]\n layer.rx_pos_out += self.alpha * self.ms_rx_pos_out[l]\n layer.ry_pos_out += self.alpha * self.ms_ry_pos_out[l]\n layer.rx_neg_out += self.alpha * self.ms_rx_neg_out[l]\n layer.ry_neg_out += self.alpha * self.ms_ry_neg_out[l]\n\n # store num for next iteration to be used as denominator\n self.denominator = num", "def optimize(self):\n prm = (self.b,self.c)\n d = 
self.d\n no = int(d*d)\n bounds = [(-1,1)]*no\n resG = differential_evolution(inpSc.entBias, bounds, args = prm, popsize = 40, disp = False)\n\n xOpt = resG.x\n xOpt = xOpt/(np.linalg.norm(xOpt))\n\n #Refine the global optimization by performing a second local optimizaiton\n x0 = xOpt\n\n res = minimize(inpSc.entBias, x0, args = prm, method='BFGS', options={'disp': False})\n xOpt = res.x\n xOpt = xOpt/(np.linalg.norm(xOpt))\n self.rhoOp = inpSc.getMat(xOpt, d)\n self.Q1 = -res.fun", "def Jacobi(A):\n # Manda a llamar a la funcion para hacer el intercambio de los renglones necesarios, de tal manera que la matriz resultante sea una matriz diagonal dominante\n A = MatDiagDom.Matriz_Diagonal_Dominante(A)\n\n # Imprime la matriz\n np.set_printoptions(precision = 6, suppress = True)\n print(\"\\nMetodo de Jacobi\\n\")\n print(\"\\n\", A, \"\\n\")\n\n # Pide al usuario los valores necesarios para el metodo\n tolerancia = float(input(\"\\nIngrese el error de tolerancia para el metodo de Jacobi: \"))\n limite = float(input(\"Ingrese el limite de iteraciones para el metodo de Jacobi: \"))\n print()\n\n print(\"Ingrese el vector incial para comenzar con el metodo de Jacobi\\n\")\n # Crea el vector inicial para comenzar con el metdo y se llena en el bucle for\n x = np.empty(A.shape[0], dtype = 'f')\n for comp in range(A.shape[0]):\n x[comp] = float(input(f\"Ingrese la componente {comp + 1} del vector: \"))\n\n # Bucle anidado que modifica la matriz A para poder aplicar el metodo de Jacobi\n for fil in range(A.shape[0]):\n denominador = A[fil, fil]\n for col in range(A.shape[1]):\n # Condicional para cambiar la componente de la diagonal por cero\n if col == fil:\n A[fil, col] = 0\n else:\n if (col + 1) == A.shape[1]:\n A[fil, col] /= denominador\n else:\n A[fil, col] /= -denominador\n\n T = np.copy(A[:, :A.shape[0]])\n\n c = np.copy(A[:, A.shape[0]:])\n\n # Calcula la norma de 'x'\n normaX1 = np.linalg.norm(x)\n\n cont = 0\n\n # Bucle que se repetira hasta que el error sea menor o igual al permitido\n while True:\n # Multiplica la matriz 'T' por el vector 'x' y le suma el vector 'c'\n x = np.matmul(T, np.reshape(x, (A.shape[0], 1))) + np.reshape(c, (A.shape[0], 1))\n\n cont += 1\n\n # Calcula la norma de 'x'\n normaX2 = np.linalg.norm(x)\n\n # Calcula el error aproximado porcentual y almacena el resultado en la variable 'errorAproxPorcen'\n errorAproxPorcen = ((normaX2 - normaX1) / normaX2) * 100\n\n if abs(errorAproxPorcen) < tolerancia:\n break\n\n if cont == limite:\n # En caso que se hayan hecho 'x' iteraciones, entonces suponemos que\n # no se ha determinado el resultado y se detiene la ejecucion del programa\n print(\"\\n\\nSe ha llegado al limite de iteraciones y no se ha encontrado un posible \", end = \"\")\n print(\"resultado aplicando el Metodo de Jacobi para resolver el sistema de ecuaciones lineales\")\n print(\"Pruebe con otro vector inicial o ingrese un limite de iteraciones mayor\\n\\n\")\n sys.exit(1)\n\n # Se copia el valor de 'normaX2' en la variable 'normaX1' para que en la siguiente iteracion se considere la norma que se acaba de calcular\n normaX1 = normaX2\n\n print(\"\\nUna aproximacion a la solucion es:\\n\", np.transpose(x).reshape(A.shape[0], 1))\n print()\n\n return x", "def conjugate(quats):\n res = np.zeros(quats.shape)\n res[:,0]=quats[:,0]\n res[:,1]=-quats[:,1]\n res[:,2]=-quats[:,2]\n res[:,3]=-quats[:,3]\n \n return res", "def _compute_jacobian(self):\n q_sum = np.cumsum(self._q)\n self._sines = np.sin(q_sum)\n self._cosines = np.cos(q_sum)\n (s_1, s_12, s_123) = 
self._sines\n (c_1, c_12, c_123) = self._cosines\n self._jacobian = np.array([\n np.cumsum([\n self._jnt_lengths[2] * c_123,\n self._jnt_lengths[1] * c_12,\n self._jnt_lengths[0] * c_1\n ])[::-1], # compute jacobian 1st row\n np.cumsum([\n -self._jnt_lengths[2] * s_123,\n -self._jnt_lengths[1] * s_12,\n -self._jnt_lengths[0] * s_1\n ])[::-1] # jacobian 2nd row\n ])\n self._jacobian_psinv = np.matmul(\n self._jacobian.T,\n np.linalg.inv(np.matmul(self._jacobian, self._jacobian.T))\n )", "def linearize_and_solve(g):\n\n # initialize the sparse H and the vector b\n H = np.zeros((len(g.x), len(g.x)), dtype='float')\n b = np.zeros(len(g.x), dtype='float')\n\n # set flag to fix gauge\n needToAddPrior = True\n Fx = 0\n\n # compute the addend term to H and b for each of our constraints\n print('linearize and build system')\n\n for edge in g.edges:\n\n # pose-pose constraint\n if edge.Type == 'P':\n\n # compute idx for nodes using lookup table\n fromIdx = g.lut[edge.fromNode]\n toIdx = g.lut[edge.toNode]\n\n # get node state for the current edge\n x_i = g.x[fromIdx:fromIdx + 3]\n x_j = g.x[toIdx:toIdx + 3]\n\n # (TODO) compute the error and the Jacobians\n e, A, B = linearize_pose_pose_constraint(\n x_i, x_j, edge.measurement)\n\n # # (TODO) compute the terms\n b_i = e.transpose() @ edge.information @ A\n b_j = e.transpose() @ edge.information @ B\n H_ii = A.transpose() @ edge.information @ A\n H_ij = A.transpose() @ edge.information @ B\n H_jj = B.transpose() @ edge.information @ B\n\n # (TODO) add the terms to H matrix and b\n H[fromIdx:fromIdx + 3, fromIdx:fromIdx + 3] += H_ii\n H[toIdx:toIdx + 3, toIdx:toIdx + 3] += H_jj\n H[fromIdx:fromIdx + 3, toIdx:toIdx + 3] += H_ij\n H[toIdx:toIdx + 3, fromIdx:fromIdx + 3, ] += H_ij.transpose()\n b[fromIdx:fromIdx + 3] += b_i[0, :]\n b[toIdx:toIdx + 3] += b_j[0, :]\n\n # Add the prior for one pose of this edge\n # This fixes one node to remain at its current location\n if needToAddPrior:\n H[fromIdx:fromIdx + 3, fromIdx:fromIdx +\n 3] = H[fromIdx:fromIdx + 3,\n fromIdx:fromIdx + 3] + 1000 * np.eye(3)\n needToAddPrior = False\n\n # pose-pose constraint\n elif edge.Type == 'L':\n print(\"you shouldn't be here...\")\n # compute idx for nodes using lookup table\n fromIdx = g.lut[edge.fromNode]\n toIdx = g.lut[edge.toNode]\n\n # get node states for the current edge\n x = g.x[fromIdx:fromIdx + 3]\n l = g.x[toIdx:toIdx + 2]\n\n # (TODO) compute the error and the Jacobians\n e, A, B = linearize_pose_landmark_constraint(\n x, l, edge.measurement)\n\n # (TODO) compute the terms\n b_i = e.transpose() @ edge.information @ A\n b_j = e.transpose() @ edge.information @ B\n H_ii = A.transpose() @ edge.information @ A\n H_ij = A.transpose() @ edge.information @ B\n H_jj = B.transpose() @ edge.information @ B\n\n # (TODO )add the terms to H matrix and b\n H[fromIdx:fromIdx + 3, fromIdx:fromIdx + 3] += H_ii\n H[toIdx:toIdx + 2, toIdx:toIdx + 2] += H_jj\n H[fromIdx:fromIdx + 3, toIdx:toIdx + 2] += H_ij\n H[toIdx:toIdx + 2, fromIdx:fromIdx + 3, ] += H_ij.transpose()\n b[fromIdx:fromIdx + 3] = b_i\n b[toIdx:toIdx + 2] = b_j\n # solve system\n dx = np.linalg.solve(H, b)\n\n return dx", "def calc_jacobian(\n model: nn.Module,\n latents: torch.Tensor,\n normalize: bool = False,\n eps: float = 1e-8,\n vectorize=False,\n reverse_ad=True,\n norm_range=True,\n norm_diagonal=False,\n) -> torch.Tensor:\n # set to eval mode but remember original state\n in_training: bool = model.training\n model.eval() # otherwise we will get 0 gradients\n with torch.set_grad_enabled(True):\n jacob = 
[]\n input_vars = latents.clone().requires_grad_(True)\n\n output_vars = model(input_vars)\n if not vectorize:\n for i in range(output_vars.shape[1]):\n jacob.append(\n torch.autograd.grad(\n output_vars[:, i : i + 1],\n input_vars,\n create_graph=True,\n grad_outputs=torch.ones(output_vars[:, i : i + 1].shape).to(\n output_vars.device\n ),\n )[0].detach()\n )\n\n jacobian = torch.stack(jacob, 1)\n else:\n from functorch import vmap, jacrev, jacfwd\n\n if reverse_ad is True:\n jac_fn = jacrev\n else:\n jac_fn = jacfwd\n\n sample_jacobian = jac_fn(model.forward, argnums=0)\n jacobian = vmap(\n lambda x: sample_jacobian(torch.unsqueeze(x, 0)), in_dims=0\n )(input_vars).squeeze()\n\n if normalize is True:\n # normalize the Jacobian by making it volume preserving\n # jacobian /= jacobian.det().abs().pow(1 / jacobian.shape[-1]).reshape(-1, 1, 1)\n\n # normalize to make variance to 1\n # norm_factor = (output_vars.std(dim=0) + 1e-8)\n # jacobian /= norm_factor.reshape(1, 1, -1)\n if norm_range is True:\n # normalize range to [0;1]\n dim_range = (\n (output_vars.max(dim=0)[0] - output_vars.min(dim=0)[0])\n .abs()\n .reshape(-1, 1)\n )\n\n jacobian /= dim_range + eps\n elif norm_diagonal is True:\n assert (dim := jacobian.shape[1]) == jacobian.shape[2]\n jacobian /= jacobian[:, (r := torch.arange(dim)), r].unsqueeze(-1) + eps\n\n # set back to original mode\n if in_training is True:\n model.train()\n\n return jacobian", "def run(self, diffusion_coefficients):\n mat = self.buildmatrix(diffusion_coefficients)\n\n rhs = np.zeros(self.size)\n rhs[0] = -(diffusion_coefficients[0] + diffusion_coefficients[1]) * self.phi0\n\n if self.verbose > 0:\n print(\"System of equations:\")\n for i in range(mat.shape[0]):\n row = [\"{0:3g}*x{1}\".format(mat[i, j], j + 1) for j in range(mat.shape[1])]\n if self.verbose > 0:\n print(\"[{0}] = [{1:3g}]\".format(\" + \".join(row), rhs[i]))\n\n if parameters.solver == 'jacobi':\n x = self.jacobi_solver(mat, rhs)\n elif parameters.solver == 'gauss-seidel':\n x = self.gauss_seidel_solver(mat, rhs)\n elif parameters.solver == 'tridiag':\n x = self.tridiag_solver(mat, rhs)\n else:\n sys.exit('Unknown solver')\n\n if self.verbose > 1:\n print(\"Solution: {0}\".format(x))\n error = np.dot(mat, x) - rhs\n if self.verbose > 1:\n print(\"Error: {0}\".format(error))\n x = np.insert(x, 0, self.phi0)\n x = np.append(x, 0)\n return x", "def solve_prep(self):\n\n par = self.par\n sol = self.sol\n\n # a. retirement\n sol.m_ret = np.zeros((par.T,par.Nm_ret))\n sol.c_ret = np.zeros((par.T,par.Nm_ret))\n sol.a_ret = np.zeros((par.T,par.Nm_ret))\n sol.inv_v_ret = np.zeros((par.T,par.Nm_ret))\n sol.inv_vm_ret = np.zeros((par.T,par.Nm_ret))\n sol.inv_vn_ret = np.zeros((par.T,par.Nm_ret))\n\n # b. 
working\n if par.solmethod == 'G2EGM':\n\n sol.c = np.zeros((par.T,par.Nn,par.Nm))\n sol.d = np.zeros((par.T,par.Nn,par.Nm))\n sol.inv_v = np.zeros((par.T,par.Nn,par.Nm))\n sol.inv_vm = np.zeros((par.T,par.Nn,par.Nm))\n sol.inv_vn = np.zeros((par.T,par.Nn,par.Nm))\n\n sol.ucon_c = np.zeros((par.T,par.Nn,par.Nm))\n sol.ucon_d = np.zeros((par.T,par.Nn,par.Nm))\n sol.ucon_v = np.zeros((par.T,par.Nn,par.Nm))\n\n sol.dcon_c = np.zeros((par.T,par.Nn,par.Nm))\n sol.dcon_d = np.zeros((par.T,par.Nn,par.Nm))\n sol.dcon_v = np.zeros((par.T,par.Nn,par.Nm))\n\n sol.acon_c = np.zeros((par.T,par.Nn,par.Nm))\n sol.acon_d = np.zeros((par.T,par.Nn,par.Nm))\n sol.acon_v = np.zeros((par.T,par.Nn,par.Nm))\n sol.con_c = np.zeros((par.T,par.Nn,par.Nm))\n sol.con_d = np.zeros((par.T,par.Nn,par.Nm))\n sol.con_v = np.zeros((par.T,par.Nn,par.Nm))\n\n sol.z = np.zeros((par.T,par.Nn,par.Nm))\n\n sol.w = np.zeros((par.T-1,par.Nb_pd,par.Na_pd))\n sol.wa = np.zeros((par.T-1,par.Nb_pd,par.Na_pd))\n sol.wb = np.zeros((par.T-1,par.Nb_pd,par.Na_pd))\n \n elif par.solmethod == 'NEGM':\n\n sol.c = np.zeros((par.T,par.Nn,par.Nm))\n sol.d = np.zeros((par.T,par.Nn,par.Nm))\n sol.inv_v = np.zeros((par.T,par.Nn,par.Nm))\n sol.inv_vn = np.zeros((0,0,0))\n sol.inv_vm = np.zeros((par.T,par.Nn,par.Nm))\n\n sol.w = np.zeros((par.T-1,par.Nb_pd,par.Na_pd))\n sol.wa = np.zeros((par.T-1,par.Nb_pd,par.Na_pd))\n sol.wb = np.zeros((0,0,0))\n \n sol.c_pure_c = np.zeros((par.T,par.Nb_pd,par.Nm))\n sol.inv_v_pure_c = np.zeros((par.T,par.Nb_pd,par.Nm))", "def conjgrad_scipy(A, Y, sigma, tol=1e-4):\n import scipy.sparse.linalg\n Y, m, n, d, matrix_in = _format_system(A, Y)\n\n damp = m * sigma**2\n calcAA = lambda x: np.dot(A.T, np.dot(A, x)) + damp * x\n G = scipy.sparse.linalg.LinearOperator(\n (n, n), matvec=calcAA, matmat=calcAA, dtype=A.dtype)\n B = np.dot(A.T, Y)\n\n X = np.zeros((n, d), dtype=B.dtype)\n infos = np.zeros(d, dtype='int')\n itns = np.zeros(d, dtype='int')\n for i in range(d):\n def callback(x):\n itns[i] += 1 # use the callback to count the number of iterations\n\n X[:, i], infos[i] = scipy.sparse.linalg.cg(\n G, B[:, i], tol=tol, callback=callback)\n\n info = {'rmses': _rmses(A, X, Y), 'iterations': itns, 'info': infos}\n return X if matrix_in else X.flatten(), info", "def conj(q):\n q = np.array([q[0]])\n q[0,1]=-q[0,1]\n q[0,2]=-q[0,2]\n q[0,3]=-q[0,3]\n complexconjugate = quatreal(q)\n return complexconjugate", "def __init__(self):\n GinacFunction.__init__(self, \"conjugate\",\n conversions=dict(sympy='conjugate'))", "def solve(self):", "def newtonJacobian(self,r):\n #x_vec=np.array(r)\n x=r[0]\n y=r[1]\n jacobi=np.zeros([2,2], float)\n \n \n jacobi[0][0]=(4.0*(self.x_0-x)**2.0-2.0)*self.sfunc(x,y)\n jacobi[1][1]=(4.0*(self.y_0-y)**2.0-2.0)*self.sfunc(x,y)\n jacobi[1][0]=4.0*(self.x_0-x)*(self.y_0-y)*self.sfunc(x,y)\n jacobi[0][1]=jacobi[1][0]\n #print \"newton jacobian is \",jacobi\n try:\n return mat.inv(jacobi)\n except:\n print \"singular jacobi not invertable\"\n return 0", "def solve(self, gradients):\n return", "def J(cst, x):\n [u0, v0, u1, v1, u2, v2, coeffs] = cst\n [u, v, g1, g2, g3] = x\n df1du = 2*u*g3**2 - 2*g3*u0 + 2*g3*coeffs[3]*(g1*u1-u0) + 2*g3*coeffs[4]*(g2*u2-u0)\n df1dv = -2*v*g3**2 + 2*g3*v0 - 2*g3*coeffs[3]*(g1*v1-v0) - 2*g3*coeffs[4]*(g2*v2-v0)\n df1dg1 = 2*g1*coeffs[0]*(u1**2-v1**2) + 2*(v1*v0-u1*u0)*(coeffs[0]+coeffs[1]+coeffs[3]) + 2*g2*coeffs[1]*(u1*u2-v1*v2) + 2*g3*coeffs[3]*(u1*u-v1*v)\n df1dg2 = 2*g2*coeffs[2]*(u2**2-v2**2) + 2*(v2*v0-u2*u0)*(coeffs[1]+coeffs[2]+coeffs[4]) + 
2*g1*coeffs[1]*(u1*u2-v1*v2) + 2*g3*coeffs[4]*(u2*u-v2*v)\n df1dg3 = 2*g3*(u**2-v**2) + 2*(v*v0-u*u0)*(coeffs[3]+coeffs[4]+1) + 2*g1*coeffs[3]*(u1*u-v1*v) + 2*g2*coeffs[4]*(u2*u-v2*v)\n\n df2du = 0\n df2dv = 2*v*g3**2 + 2*g3*(-v0 + coeffs[3]*(g1*v1-v0) + coeffs[4]*(g2*v2-v0))\n df2dg1 = 2*g1*coeffs[0]*(v1**2-1) + 2*(1-v1*v0)*(coeffs[0]+coeffs[1]+coeffs[3]) + 2*g2*coeffs[1]*(v1*v2-1) + 2*g3*coeffs[3]*(v1*v-1)\n df2dg2 = 2*g2*coeffs[2]*(v2**2-1) + 2*(1-v2*v0)*(coeffs[1]+coeffs[2]+coeffs[4]) + 2*g1*coeffs[1]*(v1*v2-1) + 2*g3*coeffs[4]*(v2*v-1)\n df2dg3 = 2*g3*(v**2-1) + 2*(1-v*v0)*(coeffs[3]+coeffs[4]+1) + 2*g1*coeffs[3]*(v1*v-1) + 2*g2*coeffs[4]*(v2*v-1)\n\n df3du = g3*coeffs[3]*(g1*v1-v0) + g3*coeffs[4]*(g2*v2-v0) + g3*(g3*v-v0)\n df3dv = g3*coeffs[3]*(g1*u1-u0) + g3*coeffs[4]*(g2*u2-u0) + g3*(g3*u-u0)\n df3dg1 = 2*g1*coeffs[0]*u1*v1 - (v1*u0+u1*v0)*(coeffs[0]+coeffs[1]+coeffs[3]) + g2*coeffs[1]*(u1*v2+v1*u2) + g3*coeffs[3]*(v1*u+u1*v)\n df3dg2 = 2*g2*coeffs[2]*u2*v2 - (v2*u0+u2*v0)*(coeffs[1]+coeffs[2]+coeffs[4]) + g1*coeffs[1]*(u1*v2+v1*u2) + g3*coeffs[4]*(v2*u+u2*v)\n df3dg3 = 2*g3*u*v - (u*v0+v*u0)*(coeffs[3]+coeffs[4]+1) + g1*coeffs[3]*(v1*u+u1*v) + g2*coeffs[4]*(v2*u+u2*v)\n\n df4du = g3*coeffs[3]*(g1-1) + g3*coeffs[4]*(g2-1) + g3*(g3-1)\n df4dv = 0\n df4dg1 = 2*g1*coeffs[0]*u1 - (u0+u1)*(coeffs[0]+coeffs[1]+coeffs[3]) + g2*coeffs[1]*(u1+u2) + g3*coeffs[3]*(u+u1)\n df4dg2 = 2*g2*coeffs[2]*u2 - (u0+u2)*(coeffs[1]+coeffs[2]+coeffs[4]) + g1*coeffs[1]*(u1+u2) + g3*coeffs[4]*(u+u2)\n df4dg3 = 2*g3*u - (u+u0)*(coeffs[3]+coeffs[4]+1) + g1*coeffs[3]*(u+u1) + g2*coeffs[4]*(u+u2)\n\n df5du = 0\n df5dv = g3*coeffs[3]*(g1-1) + g3*coeffs[4]*(g2-1) + g3*(g3-1)\n df5dg1 = 2*g1*coeffs[0]*v1 - (v1+v0)*(coeffs[0]+coeffs[1]+coeffs[3]) + g2*coeffs[1]*(v2+v1) + g3*coeffs[3]*(v1+v)\n df5dg2 = 2*g2*coeffs[2]*v2 - (v2+v0)*(coeffs[1]+coeffs[2]+coeffs[4]) + g1*coeffs[1]*(v2+v1) + g3*coeffs[4]*(v2+v)\n df5dg3 = 2*g3*v - (v0+v)*(coeffs[3]+coeffs[4]+1) + g1*coeffs[3]*(v1+v) + g2*coeffs[4]*(v2+v)\n\n return np.array([\n [df1du, df1dv, df1dg1, df1dg2, df1dg3],\n [df2du, df2dv, df2dg1, df2dg2, df2dg3],\n [df3du, df3dv, df3dg1, df3dg2, df3dg3],\n [df4du, df4dv, df4dg1, df4dg2, df4dg3],\n [df5du, df5dv, df5dg1, df5dg2, df5dg3],\n ])", "def alg(c):\n return c[0]*G[0] + c[1]*G[1] + c[2]*G[2]", "def solve(self):\n # check for jacobian and set it if present and to be used\n if self.use_sparse:\n if self._use_jac and hasattr(self.problem,'sparse_jac'):\n jac = self.problem.sparse_jac\n else:\n jac = None\n else:\n if self._use_jac and hasattr(self.problem,'jac'):\n jac = self.problem.jac\n else:\n jac = None\n \n # Initialize solver and solve \n \n solved = False\n local_min = False\n\n res = N.zeros(self.x0.__len__())\n while (not solved) and self.reg_count < 2:\n try:\n if self._use_fscale:\n self.solver.KINSOL_init(self.func,self.x0,self.dim,jac,self.constraints,self.use_sparse,self.verbosity,self.norm_of_res,self.reg_param,self.fscale)\n else:\n self.solver.KINSOL_init(self.func,self.x0,self.dim,jac,self.constraints,self.use_sparse,self.verbosity,self.norm_of_res,self.reg_param,None)\n start = time.clock()\n res = self.solver.KINSOL_solve(not self._use_ls)\n stop = time.clock()\n self.exec_time += (stop - start)\n solved = True\n except KINError as error:\n if error.value == 42:\n # Try the heuristic\n if hasattr(self.problem, 'get_heuristic_x0'):\n print \"----------------------------------------------------\"\n print \" Solver stuck with zero step-length.\"\n print 
\"----------------------------------------------------\"\n print \"The following variables have start value zero\"\n print \"and min set to zero causing the zero step-lenght.\"\n print \"These settings are either set by default or by user.\"\n print \"\"\n\n self.x0 = self.problem.get_heuristic_x0()\n self.reg_count += 1\n \n print \"\"\n print \"This setting (start and min to zero) can often\"\n print \"cause problem when initializing the system. \"\n print \"\"\n print \"To avoid this the above variables have\"\n print \"their start attributes reset to one.\"\n print \"\"\n print \"Trying to solve the system again...\"\n else:\n raise KINSOL_Exception(\"Regularization failed due to constraints, tried getting heuristic initial guess but failed.\")\n \n\n elif (error.value == 2):\n print \"---------------------------------------------------------\"\n print \"\"\n print \" !!! WARNING !!!\"\n print \"\"\n print \" KINSOL has returned a result but the algorithm has converged\"\n print \" to a local minima, the initial values are NOT consistant!\"\n print \"\"\n print \"---------------------------------------------------------\"\n solved = True\n local_min = True\n else:\n # Other error, send onward as exception\n self.problem.check_constraints(res)\n raise KINSOL_Exception(error.msg[error.value])\n \n if not solved:\n self.solver.Free_KINSOL()\n raise KINSOL_Exception(\"Algorithm exited solution loop without finding a solution, please contact Assimulo support.\")\n\n if self.check_with_model:\n self.problem.check_constraints(res)\n if not local_min:\n print \"Problem sent to KINSOL solved.\"\n \n return res", "def SelfDualNewtonSystem(A, b, c, e):\n \n n = A.shape[1]\n m = A.shape[0]\n \n b_bar = b - np.matmul(A,e)\n c_bar = c - e\n alpha = 1 + np.dot(c, e)\n beta = n + 2\n \n A_star = np.c_[A,-b,b_bar]\n C = np.zeros((n+2,n+2))\n C[0:n,n] = c\n C[n,0:n] = -C[0:n,n].T\n C[0:n,n+1] = -c_bar\n C[n+1,0:n] = -C[0:n,n+1].T\n C[n,n+1] = alpha\n C[n+1,n] = -C[n,n+1].T\n \n yA = np.r_[np.zeros((m,m)), -A_star.T, np.zeros((n+2, m))]\n xA = np.r_[A_star, C, np.eye(n+2)]\n sA = np.r_[np.zeros((m, n+2)), -np.eye(n+2), np.eye(n+2)]\n \n return np.c_[yA, xA, sA]", "def solve(self):\n start = timer()\n # encode into milp\n me = MILPEncoder(MILPSolver.prob,\n MILPSolver.params.logger.LOGFILE, \n MILPSolver.params.INTRA_DEP_CONSTRS,\n MILPSolver.params.INTER_DEP_CONSTRS)\n if MILPSolver.lp == True:\n gmodel = me.lp_encode()\n else:\n gmodel = me.encode()\n # Set gurobi parameters\n pgo = 1 if MILPSolver.params.PRINT_GUROBI_OUTPUT == True else 0\n gmodel.setParam('OUTPUT_FLAG', pgo)\n tl = MILPSolver.params.TIMEOUT\n if tl != -1 : gmodel.setParam('TIME_LIMIT', tl)\n if not MILPSolver.params.DEFAULT_CUTS: \n MILPSolver.disable_default_cuts(gmodel)\n gmodel._vars = gmodel.getVars()\n # set callback cuts \n MILPSolver.id_form = IdealFormulation(MILPSolver.prob,\n gmodel, \n MILPSolver.params.IDEAL_FREQ,\n MILPSolver.params.logger.LOGFILE)\n MILPSolver.dep_cuts = DepCuts(MILPSolver.prob,\n gmodel,\n MILPSolver.params.DEP_FREQ,\n MILPSolver.params.INTRA_DEP_CUTS,\n MILPSolver.params.INTER_DEP_CUTS,\n MILPSolver.sip_params,\n MILPSolver.params.logger.LOGFILE)\n # Optimise\n if MILPSolver.params.callback_enabled() and MILPSolver.lp == False:\n gmodel.optimize(MILPSolver._callback)\n else:\n gmodel.optimize()\n\n runtime = timer() - start\n cex = None \n if MILPSolver.status == SolveResult.BRANCH_THRESHOLD:\n result = SolveResult.BRANCH_THRESHOLD\n elif gmodel.status == GRB.OPTIMAL:\n cex_shape = 
MILPSolver.prob.spec.input_layer.input_shape\n cex = np.zeros(cex_shape)\n for i in itertools.product(*[range(j) for j in cex_shape]):\n cex[i] = MILPSolver.prob.spec.input_layer.out_vars[i].x\n result = SolveResult.UNSATISFIED\n elif gmodel.status == GRB.TIME_LIMIT:\n result = SolveResult.TIMEOUT\n elif gmodel.status == GRB.INTERRUPTED:\n result = SolveResult.INTERRUPTED\n elif gmodel.status == GRB.INFEASIBLE or gmodel.status == GRB.INF_OR_UNBD:\n result = SolveResult.SATISFIED\n else:\n result = SolveResult.UNKNOWN\n \n # MILPSolver.logger.info('Verification problem {} solved, '\n # 'LP: {}, '\n # 'time: {:.2f}, '\n # 'result: {}.'\n # .format(MILPSolver.prob.id,\n # MILPSolver.lp,\n # runtime,\n # result.value))\n \n return SolveReport(result, runtime, cex)", "def ecos_solve(A, b, c, dim_dict, **kwargs):\n\n ###\n # ECOS uses a different definition of the exp cone,\n # with y and z switched. In the future I might wrap it\n # (i.e., switch rows of A and elements of b, and switch\n # elements of the solutions s and y) but for now\n # I'm not supporting exp cones in ecos.\n ###\n\n ecos_cones = {'l': dim_dict['l'] if 'l' in dim_dict else 0,\n 'q': dim_dict['q'] if 'q' in dim_dict else []} # ,\n # 'e': dim_dict['ep'] if 'ep' in dim_dict else 0}\n # print(ecos_cones)\n if ('ep' in dim_dict and dim_dict['ep'] > 0\n or 's' in dim_dict and len(dim_dict['s']) > 0):\n raise SolverError(\n 'Only zero, linear, and second order cones supported.')\n zero = 0 if 'z' not in dim_dict else dim_dict['z']\n ecos_A, ecos_G = A[:zero, :], A[zero:, :]\n ecos_b, ecos_h = b[:zero], b[zero:]\n sol = ecos.solve(c=c, G=ecos_G, h=ecos_h, dims=ecos_cones,\n A=ecos_A, b=ecos_b, **kwargs)\n\n solution = True\n\n x = sol['x']\n s = np.concatenate([np.zeros(zero), sol['s']])\n # not sure we can trust this\n # s = b - A@x\n y = np.concatenate([sol['y'], sol['z']])\n\n if sol['info']['exitFlag'] == 0: # check that things make sense\n print('prim abs res.', np.linalg.norm(A@x + s - b))\n print('dua abs res.', np.linalg.norm(A.T@y + c))\n print('s^T y', s@y)\n\n if sol['info']['exitFlag'] in [1, 11]: # infeas\n solution = False\n obj = b@y\n assert (obj < 0)\n y /= -obj\n\n print('primal infeas. cert residual norm', np.linalg.norm(A.T@y))\n #cones = dim2cones(dim_dict)\n proj = prod_cone.Pi(-y, *make_prod_cone_cache(dim_dict))\n print('primal infeas dist from cone', np.linalg.norm(proj))\n # if not (np.linalg.norm(proj) == 0.) and sol['info']['exitFlag'] == 1.:\n # raise SolverError\n\n x = np.zeros_like(x)\n s = np.zeros_like(s)\n\n if sol['info']['exitFlag'] in [2, 12]: # unbound\n solution = False\n obj = c@x\n assert (obj < 0)\n x /= -obj\n s /= -obj\n\n print('dual infeas. cert residual norm', np.linalg.norm(A@x + s))\n proj = prod_cone.Pi(s, *make_prod_cone_cache(dim_dict))\n print('dual infeas cert dist from cone', np.linalg.norm(s - proj))\n # if not (np.linalg.norm(s - proj) == 0.) 
and sol['info']['exitFlag'] == 2.:\n # raise SolverError\n y = np.zeros_like(y)\n\n # print('ECOS SOLUTION')\n # print('solution', solution)\n # print('x', x)\n # print('s', s)\n # print('y', y)\n\n z = xsy2z(x, s, y, tau=solution, kappa=not solution)\n\n return z, sol['info']", "def block_conjgrad(A, Y, sigma, X0=None, tol=1e-2):\n Y, m, n, d, matrix_in = _format_system(A, Y)\n sigma = np.asarray(sigma, dtype='float')\n sigma = sigma.reshape(sigma.size, 1)\n\n damp = m * sigma**2\n rtol = tol * np.sqrt(m)\n G = lambda x: np.dot(A.T, np.dot(A, x)) + damp * x\n B = np.dot(A.T, Y)\n\n # --- conjugate gradient\n X = np.zeros((n, d)) if X0 is None else np.array(X0).reshape((n, d))\n R = B - G(X)\n P = np.array(R)\n Rsold = np.dot(R.T, R)\n AP = np.zeros((n, d))\n\n maxiters = int(n / d)\n for i in range(maxiters):\n AP = G(P)\n alpha = np.linalg.solve(np.dot(P.T, AP), Rsold)\n X += np.dot(P, alpha)\n R -= np.dot(AP, alpha)\n\n Rsnew = np.dot(R.T, R)\n if (np.diag(Rsnew) < rtol**2).all():\n break\n\n beta = np.linalg.solve(Rsold, Rsnew)\n P = R + np.dot(P, beta)\n Rsold = Rsnew\n\n info = {'rmses': _rmses(A, X, Y), 'iterations': i + 1}\n return X if matrix_in else X.flatten(), info", "def _solve_explicit(self, initial_conditions):\n coeff = self.a ** 2 * self.tau / self.h ** 2\n current_solution = initial_conditions\n next_solution = np.empty_like(current_solution)\n solutions = []\n\n for t in self.t_grid:\n next_solution[1:-1] = (\n current_solution[1:-1]\n + (current_solution[:-2] - 2 * current_solution[1:-1] + current_solution[2:]) * coeff\n ) + self.rhs(self.x_grid[1:-1], t) * self.tau\n\n # left bc\n if self.left_bc_type == \"DIRICHLET\":\n next_solution[0] = self.left_bc(t)\n elif self.left_bc_type == \"NEUMANN\":\n next_solution[0] = (\n 4 * next_solution[1]\n - next_solution[2]\n - 2 * self.h * self.left_bc(t)\n ) / 3.0\n\n # right bc\n if self.right_bc_type == \"DIRICHLET\":\n next_solution[-1] = self.right_bc(t)\n elif self.right_bc_type == \"NEUMANN\":\n next_solution[-1] = (\n 4 * next_solution[-2]\n - next_solution[-3]\n + 2 * self.h * self.right_bc(t)\n ) / 3.0\n if self.mode == \"VISUALIZATION\":\n solutions.append((t, next_solution.copy()))\n current_solution = next_solution\n if self.mode == \"TEST\":\n # print(\"Result: \", current_solution.tolist())\n # print(\"Right answer: \", self.anl_solution.tolist())\n self._norma(current_solution)\n elif self.mode == \"VISUALIZATION\":\n return solutions", "def jacobi(self, lattice):\n kernel = np.array([[[0.0,0.0,0.0],[0.0,1.0,0.0],[0.0,0.0,0.0]],\n [[0.0,1.0,0.0],[1.0,0.0,1.0],[0.0,1.0,0.0]],\n [[0.0,0.0,0.0],[0.0,1.0,0.0],[0.0,0.0,0.0]]])\n return ((signal.fftconvolve(lattice, kernel, mode='same') + self.J)/ 6.0)", "def solve(self):\n\n # Assign variables to each quantity being solved.\n r_lookup, lookup, num = {}, {}, 0\n for element in self.elements:\n if is_wire(element) and element is not self.ground:\n lookup[num] = element\n r_lookup[element] = num\n num += 1\n elif not is_cs(element) and element is not self.ground:\n lookup[num] = element\n r_lookup[element] = num\n num += 1\n\n # Set up the linear algebraic equation Ax=b\n A = np.zeros((num, num))\n b = np.zeros(num)\n for row, element in lookup.items():\n if is_wire(element) and element is not self.ground:\n for two_sided in element.attached:\n if is_cs(two_sided):\n if two_sided.pos is element:\n b[row] += -1 * two_sided.current\n else:\n b[row] += two_sided.current\n else:\n if two_sided.pos is element:\n flow = 1\n else:\n flow = -1\n A[row, r_lookup[two_sided]] = 
flow\n elif is_vs(element):\n check_connected(element)\n if element.pos is not self.ground:\n A[row, r_lookup[element.pos]] = 1\n if element.neg is not self.ground:\n A[row, r_lookup[element.neg]] = -1\n b[row] = element.voltage\n elif is_resistor(element):\n check_connected(element)\n if element.pos is not self.ground:\n A[row, r_lookup[element.pos]] = 1\n if element.neg is not self.ground:\n A[row, r_lookup[element.neg]] = -1\n A[row, r_lookup[element]] = -1 * element.resistance\n\n b = b.reshape((num, 1))\n try:\n x = np.linalg.solve(A, b)\n except np.linalg.LinAlgError:\n raise CircuitError('Insufficient information to solve circuit')\n\n # Assign values to all circuit components\n for i in range(num):\n item = lookup[i]\n if is_wire(item):\n item.potential = x[i, 0]\n elif isinstance(item, DualSided):\n item.current = x[i, 0]\n\n # Mark circuit as solved\n self.been_solved = True", "def jacobian(self, x):\n pass", "def calculate_coefficients(self):\n for i in range(0, self.nz):\n zno = i * self.dz\n self.z[0][i] = zno\n plot_eccentricity_error = False\n position = -1\n for j in range(0, self.ntheta):\n # fmt: off\n self.gama[i][j] = j * self.dtheta + (np.pi - self.beta)\n [radius_external, self.xre[i][j], self.yre[i][j]] = \\\n self.external_radius_function(self.gama[i][j])\n [radius_internal, self.xri[i][j], self.yri[i][j]] = \\\n self.internal_radius_function(zno, self.gama[i][j])\n self.re[i][j] = radius_external\n self.ri[i][j] = radius_internal\n\n w = self.omega * self.ri[i][j]\n\n k = (self.re[i][j] ** 2 * (np.log(self.re[i][j]) - 1 / 2) - self.ri[i][j] ** 2 *\n (np.log(self.ri[i][j]) - 1 / 2)) / (self.ri[i][j] ** 2 - self.re[i][j] ** 2)\n\n self.c1[i][j] = (1 / (4 * self.viscosity)) * ((self.re[i][j] ** 2 * np.log(self.re[i][j]) -\n self.ri[i][j] ** 2 * np.log(self.ri[i][j]) +\n (self.re[i][j] ** 2 - self.ri[i][j] ** 2) *\n (k - 1)) - 2 * self.re[i][j] ** 2 * (\n (np.log(self.re[i][j]) + k - 1 / 2) * np.log(\n self.re[i][j] / self.ri[i][j])))\n\n self.c2[i][j] = (- self.ri[i][j] ** 2) / (8 * self.viscosity) * \\\n ((self.re[i][j] ** 2 - self.ri[i][j] ** 2 -\n (self.re[i][j] ** 4 - self.ri[i][j] ** 4) /\n (2 * self.ri[i][j] ** 2)) +\n ((self.re[i][j] ** 2 - self.ri[i][j] ** 2) /\n (self.ri[i][j] ** 2 *\n np.log(self.re[i][j] / self.ri[i][j]))) *\n (self.re[i][j] ** 2 * np.log(self.re[i][j] / self.ri[i][j]) -\n (self.re[i][j] ** 2 - self.ri[i][j] ** 2) / 2))\n\n self.c0w[i][j] = (- w * self.ri[i][j] *\n (np.log(self.re[i][j] / self.ri[i][j]) *\n (1 + (self.ri[i][j] ** 2) / (self.re[i][j] ** 2 - self.ri[i][j] ** 2)) - 1 / 2))\n # fmt: on\n if not plot_eccentricity_error:\n if abs(self.xri[i][j]) > abs(self.xre[i][j]) or abs(\n self.yri[i][j]\n ) > abs(self.yre[i][j]):\n plot_eccentricity_error = True\n position = i\n if plot_eccentricity_error:\n self.plot_eccentricity(position)\n sys.exit(\n \"Error: The given parameters create a rotor that is not inside the stator. 
\"\n \"Check the plotted figure and fix accordingly.\"\n )", "def transform_and_compute_jacobian(self, xj):\n x = xj[:, :self.d].detach()\n log_j = xj[:, -1]\n\n x.requires_grad = True\n y = self.flow_(x)\n\n n_batch = xj.shape[0]\n\n jx = torch.zeros(n_batch, self.d, self.d).to(log_j.device)\n directions = torch.eye(self.d).to(log_j).unsqueeze(0).repeat(n_batch, 1, 1)\n\n for i in range(self.d):\n jx[:, i, :] = torch.autograd.grad(y, x, directions[:, i, :],\n allow_unused=True, create_graph=True, retain_graph=True)[0]\n x.requires_grad = False\n x.grad = None\n\n log_det_j = torch.log(torch.abs(torch.det(jx)))\n return torch.cat([y.detach(), (log_j + log_det_j).unsqueeze(1)], 1)", "def conj_grad(A, b, x0, n_steps=None, tol=1e-9):\n if n_steps is None:\n n_steps = len(b)\n x = x0\n r = b - A @ x0\n r2_old = r @ r\n p = r\n for i in range(n_steps):\n Ap = A @ p\n alpha = r2_old / (p @ Ap)\n x = x + alpha * p\n r = r - alpha * Ap\n r2_new = r @ r\n if r2_new < tol:\n break\n beta = r2_new / r2_old\n p = r + beta * p\n r2_old = r2_new\n return x", "def find_coefficients(self):\n self.make_matrix()\n self.coeffs = np.linalg.solve(self.global_matrix,self.global_vector)\n self.coeffs = np.append(self.coeffs, self.D) #Initial condition", "def _solve_implicit(self, initial_conditions):\n coeff = self.a ** 2 * self.tau / self.h ** 2\n l_and_u = (1, 1)\n ab = np.empty((3, self.n_x))\n # main diagonal\n ab[1] = 1 + 2.0 * coeff\n # upper and lower diagonals\n ab[0] = ab[2] = -coeff\n\n # left bc\n if self.left_bc_type == \"DIRICHLET\":\n ab[0][1] = 0 # upper diagonal\n ab[1][0] = 1 # main diagonal\n elif self.left_bc_type == \"NEUMANN\":\n ab[0][1] = 1 # upper diagonal\n ab[1][0] = -1 # main diagonal\n\n # right bc\n if self.right_bc_type == \"DIRICHLET\":\n ab[1][-1] = 1 # main diagonal\n ab[2][-2] = 0 # lower diagonal\n elif self.right_bc_type == \"NEUMANN\":\n ab[1][-1] = 1 # main diagonal\n ab[2][-2] = -1 # lower diagonal\n\n current_solution = initial_conditions\n solutions = []\n\n for t in self.t_grid:\n b = current_solution + self.tau * self.rhs(self.x_grid, t)\n # left bc\n if self.left_bc_type == \"DIRICHLET\":\n b[0] = self.left_bc(t)\n elif self.left_bc_type == \"NEUMANN\":\n b[0] = self.h * self.left_bc(t)\n # right bc\n if self.right_bc_type == \"DIRICHLET\":\n b[-1] = self.right_bc(t)\n elif self.right_bc_type == \"NEUMANN\":\n b[-1] = self.h * self.right_bc(t)\n\n next_solution = solve_banded(l_and_u, ab, b)\n if self.mode == \"VISUALIZATION\":\n solutions.append((t, next_solution.copy()))\n current_solution = next_solution\n if self.mode == \"TEST\":\n # print(\"Result: \", current_solution.tolist())\n # print(\"Right answer: \", self.anl_solution.tolist())\n self._norma(current_solution)\n elif self.mode == \"VISUALIZATION\":\n return solutions", "def solve_step(self, bc_left=0):\n status = 0\n self.t += self.dt\n\n\n ### Construct the RHS vector\n # Implicit terms\n #cff1 = 0. # Fully implicit\n #cff2 = 0.\n cff1 = 0.5*(1. 
- 2.*self.c_im)*self.dt\n cff2 = 0.5*self.c_im*self.dt\n RHS = cff1*self.L_rhs.dot(self.B) +\\\n cff2*self.L_rhs.dot(self.B_n_m1)\n\n # Nonlinear (explicit) terms\n cff3 = self.dt*(3 + self.b_ex)*0.5\n cff4 = -self.dt*(1+2*self.b_ex)*0.5\n cff5 = self.dt*(self.b_ex)*0.5\n \n RHS += cff3*self.calc_nonlinear_rhs(self.B)\n RHS += cff4*self.calc_nonlinear_rhs(self.B_n_m1)\n RHS += cff5*self.calc_nonlinear_rhs(self.B_n_m2)\n\n # Other terms from the time-derivative\n RHS += self.B\n\n # Add the BCs to the RHS\n cff0 = 0.5*(1 + self.c_im)*self.dt\n self.add_bcs(RHS, bc_left, cff0, cff1, cff2)\n\n # Use the direct banded matrix solver (faster)\n self.B_n_p1[:] = la.solve_banded( (self._j,self._j), self.L_lhs.data[::-1,:], RHS)\n\n # Check solutions\n if np.any( np.isnan(self.B_n_p1)):\n return -1\n\n # Update the terms last\n self.B_n_m2[:] = self.B_n_m1\n self.B_n_m1[:] = self.B\n self.B[:] = self.B_n_p1\n\n ## Update the boundary terms in these equations\n self.bcs[2] = self.bcs[1]\n self.bcs[1] = self.bcs[0]\n self.bcs[0] = bc_left\n\n return status", "def system(coeffs: List[List[int]], t: Symbol = Symbol('t', real=True)):\n matrix = Matrix(coeffs)\n procedure = Procedure()\n ident = eye(matrix.rows)\n lam = Symbol('lambda')\n char_eq = simplify((matrix - lam * ident).det())\n\n procedure\\\n .text('Characteristic equation: ', nl=True)\\\n .eq(Eq(char_eq, 0, evaluate=False))\n\n rts = roots(char_eq, lam)\n\n procedure.text('Eigenvalues and eigenvectors', nl=True)\n\n eigenvects = matrix.eigenvects()\n count = 1\n consts = numbered_symbols('C', Dummy, 1)\n sols = []\n conj_roots = []\n for eigenval, mult, eigenvec in eigenvects:\n\n # skip the conjugates of complex eigenvalues\n if not eigenval.is_real:\n if eigenval in conj_roots:\n continue\n\n procedure.latex('\\\\lambda_{} = {}'.format(\n count, eigenval), nl=True)\n for i in range(len(eigenvec)):\n \n aug_matrix = (matrix - eigenval * ident)\\\n .col_insert(matrix.cols, Matrix([0 for i in range(matrix.rows)]))\n procedure.eq(aug_matrix, nl=False).text(' ~ ')\\\n .eq(aug_matrix.rref()[0], nl=False).latex('\\\\Rightarrow ')\n\n procedure.eq(Eq(Dummy('v'), eigenvec[i], evaluate=False))\n if not eigenval.is_real:\n real, imag = eigenval.as_real_imag()\n real_vec, imag_vec = (\n eigenvec[i] * expand(exp(imag*I*t), complex=True)).as_real_imag()\n\n procedure.text(\"Use Euler's formula to expand the imaginary part\", nl=True)\n procedure.eq(eigenvec[i], nl=False).latex(' ').eq(exp(real*t + imag*I*t), nl=False)\\\n .latex(' = ').eq(exp(real*t), nl=False).latex(' ')\\\n .eq(eigenvec[i] * expand(exp(imag*I*t), complex=True), nl=False).latex(' = ')\\\n .eq(exp(real*t), nl=False).latex('\\\\left( ').eq(real_vec, nl=False)\\\n .latex(' + ').eq(imag_vec, nl=False).latex('\\\\right)', nl=True)\n # if mult == len(eigenvec):\n sols.append(['comp', exp(real * t), real_vec, imag_vec])\n\n # we don't need the conjugate\n conj_roots.append(conjugate(eigenval))\n else:\n # if mult == len(eigenvec):\n sols.append(['real', exp(eigenval * t), eigenvec[i]])\n \n if mult != len(eigenvec): # repeated eigenvectors\n procedure.text('Find the generalized eigenvector')\\\n .latex('\\\\left( M - \\\\lambda I \\\\right) w = v ', nl=True)\n \n vec_syms = symbols('a0:{}'.format(matrix.rows))\n generalized_eigenvec = Matrix(vec_syms)\n\n # note: insert is not in-place\n # construct the augmented matrix [ M-lambda I | v]\n aug_matrix = (matrix - eigenval * ident).col_insert(matrix.cols, eigenvec[0]) \n procedure.eq(aug_matrix, nl=False).text(' ~ ').eq(aug_matrix.rref()[0], 
nl=False)\n\n result = solve((matrix - eigenval * ident) *\n generalized_eigenvec - eigenvec[0], generalized_eigenvec)\n\n free_vars = list(vec_syms)\n\n # use free variables to express other variables\n for var in result:\n if var in free_vars:\n free_vars.remove(var)\n generalized_eigenvec = generalized_eigenvec.subs(\n var, result[var])\n for i, var in enumerate(free_vars): # use 0, 1... for free variables\n generalized_eigenvec = generalized_eigenvec.subs(var, i)\n\n procedure.latex('\\\\Rightarrow ')\\\n .eq(Eq(Dummy('w'), generalized_eigenvec, evaluate=False))\n\n sols.append(\n ['gen', exp(eigenval * t), eigenvec[0], generalized_eigenvec])\n\n count += mult\n\n procedure.text('General solution: ', nl=True)\n procedure.latex('\\\\vec{\\\\mathbf{x}} = ')\n gen_sols = []\n for i in range(len(sols)):\n sol = sols[i]\n if sol[0] == 'real':\n procedure.eq(next(consts), nl=False).eq(\n sol[1], nl=False).eq(sol[2], nl=False)\n gen_sols.append(sol[1] * sol[2])\n elif sol[0] == 'gen':\n procedure.eq(next(consts), nl=False).eq(sol[1], nl=False)\\\n .latex('\\\\left(').eq(sol[2], nl=False).latex('t + ')\\\n .eq(sol[3], nl=False).latex('\\\\right)')\n gen_sols.append(sol[1] * sol[2] * t + sol[1] * sol[3])\n elif sol[0] == 'comp':\n procedure.eq(sol[1], nl=False)\\\n .latex('\\\\left(').eq(next(consts), nl=False).eq(sol[2], nl=False).latex(' + ')\\\n .eq(next(consts), nl=False).eq(sol[3], nl=False).latex('\\\\right)')\n gen_sols.append(sol[1] * sol[2])\n gen_sols.append(sol[1] * sol[3])\n\n if i != len(sols) - 1:\n procedure.latex('+')\n\n return gen_sols, procedure", "def solve(self,init=None,g_init=1e-3,g_step=5e-3,g_fin=None,evol=False,movingGrid=False):\n if(g_fin==None): g_fin=self.g\n #Check if all signs are correct\n if(g_fin<0):\n if(g_step>0): g_step*=-1.\n if(g_init>0): g_init*=-1.\n else:\n if(g_step<0): g_step*=-1.\n if(g_init<0): g_step*=-1.\n\n #If no initial distribution is given, start from the BCS ground state\n if(init==None): init=[1 if i<self.N else 0 for i in range(self.n)]\n var_init=np.array([-2.*init[i]-g_init/(1-2.*init[i])*np.sum([self.XXZ.Z(j,i)*(init[j]-init[i]) for j in range(self.n) if j!=i]) for i in range(self.n)])\n n_step=int((g_fin-g_init)/g_step)\n g=g_init\n\n #Define necessary variables if evol or movingGrid=True\n if(evol or movingGrid):\n var_evol=np.zeros([n_step,self.n])\n g_evol=np.zeros(n_step)\n if(movingGrid):\n rap_evol = np.zeros([n_step,self.N],dtype=complex)\n rap_evol[0] = [self.levels[i] for i in range(self.n) if init[i]!=0 ]\n rap=np.array([self.levels[i]+0.5*np.abs(np.random.rand()) for i in range(self.n) if init[i]!=0])\n grid=np.zeros(self.N+1,dtype=complex)\n grid[0]=1e3\n for k in range(self.N): grid[k+1]=rap[k]\n n_grid=n_step/20 #Calculates rapidities at 20 intermediate steps\n\n #Gradually increase the coupling constant g and solve iteratively at each step starting from the Taylor approximation from the previous step\n for i in range(n_step):\n var_new=self.newtonraphson(g,var_init)\n der=self.get_derivative(var_new,g)\n #var_init=self.taylor_expansion(g,g_step,var_new)\n var_init = var_new+g_step*der\n g+=g_step\n #print g\n\n #Save variables at current step if evol =True\n if(evol or movingGrid):\n var_evol[i]=var_init\n g_evol[i]=g\n if(movingGrid and i%n_grid==0 and i!=0):\n #Method for obtaining the rapidities starting from the set of Lambda_i\n rf=RootFinder(self.XXZ,var_evol[i]/g_evol[i],g_evol[i],self.N)\n u=rf.solveForU(grid)\n lm=LaguerreMethod(grid,u)\n rap=lm.laguerre()\n rap_evol[i]=np.sort(lm.laguerre())\n for k in 
range(self.N): grid[k+1]=rap[k]\n grid[0]=10*max(rap)\n elif(movingGrid and i!=0):\n rf=RootFinder(self.XXZ,var_evol[i]/g_evol[i],g_evol[i],self.N)\n u=rf.solveForU(grid)\n lm=LaguerreMethod(grid,u)\n rap_evol[i]=np.sort(lm.laguerre())\n \n \n #One final iterative solution at g=g_fin\n self.solution=self.newtonraphson(g_fin,var_init)\n #Calculate the occupation numbers\n self.occupation=0.5*(-1.-self.solution+g_fin*self.get_derivative(self.solution,g_fin))\n\n #One final calculation of the rapidities\n if(movingGrid):\n rf=RootFinder(self.XXZ,self.solution/g_fin,g_fin,self.N)\n u=rf.solveForU(grid)\n lm=LaguerreMethod(grid,u)\n rap=lm.laguerre()\n self.rapidities=rap\n\n if movingGrid: return [g_evol,var_evol,rap_evol]\n if evol: return [g_evol,var_evol]\n return self.solution", "def solve(self):\n initial_fes = eades(self.graph, self.force_forward_edges)\n initial_fes_vec = self.edge_vector(initial_fes)\n\n # bounds for the objective\n lower_bound = 0\n upper_bound = np.sum(initial_fes_vec @ self.weights)\n\n self.logger.info('Calculating FES for graph with %d edges, max %d feedback edges', self.m, len(initial_fes))\n\n simple_cycles = set(induced_cycles(self.graph, initial_fes))\n\n for iteration in itertools.count(1):\n self.logger.info('Baharev iteration %d, %g <= objective <= %g, %d simple cycles', iteration, lower_bound,\n upper_bound, len(simple_cycles))\n\n # Formulate and solve the problem for this iteration:\n y = cp.Variable(self.m, boolean=True, name=\"y\")\n objective = cp.Minimize(cp.sum(y @ self.weights))\n\n cycle_vectors = [self.edge_vector(nx.utils.pairwise(cycle)) for cycle in simple_cycles]\n constraints = [cp.sum(a @ y) >= 1 for a in cycle_vectors]\n constraints.append(cp.sum(y @ self.force_forward_vec) == 0) # no force forward vec may be in the result set\n problem = cp.Problem(objective, constraints)\n resolution = problem.solve(**self.solver_args)\n if problem.status != 'optimal':\n self.logger.warning('Optimization solution is %s. Try solver != %s?', problem.status,\n problem.solver_stats.solver_name)\n self.logger.debug(\n \"Solved optimization problem with %d constraints: %s -> %s (%g + %g seconds, %d iterations, solver %s)\",\n len(constraints), resolution, problem.solution.status,\n problem.solver_stats.solve_time or 0, problem.solver_stats.setup_time or 0,\n problem.solver_stats.num_iters or 0, problem.solver_stats.solver_name)\n current_solution = np.abs(y.value) >= 0.5 # y.value = vector of floats each ≈ 0 or 1\n current_fes = self.edges_for_vector(current_solution)\n self.logger.debug('Iteration %d, resolution: %s, %d feedback edges', iteration, resolution,\n len(current_fes))\n # S, the feedback edge set calculated using the constraint subset, can be an incomplete solution\n # (i.e. cycles remain after removing S from the graph). So lets compare this with the upper bound\n # from the heuristic\n lower_bound = max(lower_bound, objective.value)\n if lower_bound == upper_bound:\n self.logger.info('upper == lower bound == %g, optimal solution found', lower_bound)\n break # y.value is the optimal solution\n\n if resolution > upper_bound:\n self.logger.error('Solution %g > upper bound %g!', resolution, upper_bound)\n break\n\n Gi = self.graph.copy()\n Gi.remove_edges_from(current_fes)\n if nx.is_directed_acyclic_graph(Gi):\n self.logger.info('Graph is acyclic, optimal solution found')\n break # y.value is the optimal solution\n\n # The solution is not yet ideal. 
So we take G^(i), the graph still containing some feedback edges,\n # calculate a heuristic on it and use the heuristic (= over-estimation) to adjust upper bound and\n # determine additional simple cycles (= constraints)\n Fi = eades(Gi, self.force_forward_edges)\n yi = self.edge_vector(Fi) | current_solution\n zi = np.sum(yi @ self.weights)\n if zi < upper_bound:\n upper_bound = zi\n current_solution = yi\n simple_cycles |= set(induced_cycles(Gi, Fi))\n\n self.solution_vector = current_solution\n self.solution = self.edges_for_vector(current_solution)\n self.objective = objective.value\n self.iterations = iteration\n self.simple_cycles = simple_cycles\n return self.solution", "def _C(self):\n\n # Find the local x and y coordinates at each node\n xi = 0\n yi = 0\n xj = self.width()\n yj = 0\n xm = xj\n ym = self.height()\n xn = 0\n yn = ym\n\n # Calculate the [C] coefficient matrix\n C = array([[1, xi, yi, xi**2, xi*yi, yi**2, xi**3, xi**2*yi, xi*yi**2, yi**3, xi**3*yi, xi*yi**3],\n [0, 0, 1, 0, xi, 2*yi, 0, xi**2, 2*xi*yi, 3*yi**2, xi**3, 3*xi*yi**2],\n [0, -1, 0, -2*xi, -yi, 0, -3*xi**2, -2*xi*yi, -yi**2, 0, -3*xi**2*yi, -yi**3],\n \n [1, xj, yj, xj**2, xj*yj, yj**2, xj**3, xj**2*yj, xj*yj**2, yj**3, xj**3*yj, xj*yj**3],\n [0, 0, 1, 0, xj, 2*yj, 0, xj**2, 2*xj*yj, 3*yj**2, xj**3, 3*xj*yj**2],\n [0, -1, 0, -2*xj, -yj, 0, -3*xj**2, -2*xj*yj, -yj**2, 0, -3*xj**2*yj, -yj**3],\n\n [1, xm, ym, xm**2, xm*ym, ym**2, xm**3, xm**2*ym, xm*ym**2, ym**3, xm**3*ym, xm*ym**3],\n [0, 0, 1, 0, xm, 2*ym, 0, xm**2, 2*xm*ym, 3*ym**2, xm**3, 3*xm*ym**2],\n [0, -1, 0, -2*xm, -ym, 0, -3*xm**2, -2*xm*ym, -ym**2, 0, -3*xm**2*ym, -ym**3],\n\n [1, xn, yn, xn**2, xn*yn, yn**2, xn**3, xn**2*yn, xn*yn**2, yn**3, xn**3*yn, xn*yn**3],\n [0, 0, 1, 0, xn, 2*yn, 0, xn**2, 2*xn*yn, 3*yn**2, xn**3, 3*xn*yn**2],\n [0, -1, 0, -2*xn, -yn, 0, -3*xn**2, -2*xn*yn, -yn**2, 0, -3*xn**2*yn, -yn**3]])\n \n # Return the coefficient matrix\n return C", "def nonlinear_eom_to_ss(aircraft, x_ss, u_ss, x_0, u_0, m, j, dx=0.1, du=0.1):\n \"\"\"return jacobians a, b wrt to x_ss and output matrices c, and d wrt u_ss.\"\"\"\n x = x_0\n u = u_0\n a = zeros((len(x_0), len(x_0)))\n b = zeros((len(x_0), len(u_0)))\n for ii in range(0, len(x_0)):\n x[ii] = x[ii] + dx\n c = c_f_m(aircraft, x, u_0)\n dxdt_1 = nonlinear_eom(x, m, j, c)\n\n x[ii] = x[ii] - dx\n c = c_f_m(aircraft, x, u_0)\n dxdt_2 = nonlinear_eom(x, m, j, c)\n ddx_dx = (dxdt_1 - dxdt_2)/(2*dx)\n a[:, ii] = transpose(ddx_dx)\n x = x_0\n\n for ii in range(0, len(u_0)):\n u[ii] = u[ii] + du\n c = c_f_m(aircraft, x_0, u)\n dxdt_1 = nonlinear_eom(x, m, j, c)\n\n u[ii] = u[ii] - du\n c = c_f_m(aircraft, x_0, u)\n dxdt_2 = nonlinear_eom(x, m, j, c)\n ddx_dx = (dxdt_1 - dxdt_2)/(2*du)\n b[:, ii] = transpose(ddx_dx)\n u = u_0\n\n a_out = a[x_ss, :]\n a_out = a_out[:, x_ss]\n\n b_out = b[x_ss, :]\n b_out = b_out[:, u_ss]\n\n c_out = identity(len(x_ss))\n d_out = zeros((len(x_ss), len(u_ss)))\n return a_out, b_out, c_out, d_out", "def solve_inc(self, DU, DF, calcG=True):\n\n nu = len(self.udofs)\n np = len(self.pdofs)\n ndof = len(self.dofs)\n decompose = False\n if calcG: decompose = True\n scheme = self.scheme\n\n if calcG:\n if self.verbose and nu>500: print \" building system...\", ; sys.stdout.flush()\n self.mountG()\n\n # Mount G11.. 
G22 matrices\n cG = self.G.tocsc()\n self.G11 = cG[:nu , :nu ]\n self.G12 = cG[:nu , nu:]\n self.G21 = cG[ nu:, :nu ]\n self.G22 = cG[ nu:, nu:]\n cG = None # Free memory\n\n # Pick last values for disp, vel and accel\n U_0 = self.U.copy()\n Uv_0 = self.Uv.copy()\n Ua_0 = self.Ua.copy()\n\n # Mount RHS\n self.RHS = self.DF - dot(self.C, Uv_0 + (1.0-gamma)*h*Ua_0) - dot(self.K, U_0 + h*Uv_0 + (0.5-beta)*(h**2.0)*Ua_0) \n\n RHS1 = RHS[:nu]\n Ua2 = DU[nu:]\n\n # Solve linear system\n RHS2 = self.G22*Ua2 #sparse matrix * dense vector\n if nu:\n if self.verbose and nu>500: print \"solving...\", ; sys.stdout.flush()\n if scheme == \"MNR\" and decompose : self.LUsolver = factorized(self.G11)\n if scheme == \"NR\" or scheme == \"FE\": self.LUsolver = factorized(self.G11)\n U1 = scipy.sparse.linalg.spsolve(self.G11, RHS1 - self.G12*Ua2)\n RHS2 += self.G21*Ua1\n\n # updating disp, vel and accel\n self.Uv = Uv_0 + (1.0-gamma)*h*Ua_0 + gamma*h*self.Ua\n self.U = U_0 + h*Uv_0 + (0.5-beta)*(h**2.0)*Ua_0 + (h**2.0)*beta*self.Ua\n \n # calculating reactions\n self.DF = dot(self.M,self.Ua) + dot(self.C,self.Uv) + dot(self.K,self.U)\n for i in range(nu):\n self.F[self.udofs[i].eq_id] = F_bk[self.udofs[i].eq_id]\n\n # Complete vectors\n for i, dof in enumerate(self.udofs): DU[dof.eq_id] = U1[i]\n for i, dof in enumerate(self.pdofs): DF[dof.eq_id] = F2[i]\n\n if self.verbose and nu>500: print \"updating...\" ; sys.stdout.flush()\n DFint = self.update_elems_and_nodes(DU) # Also calculates DFint\n #if self.verbose: print \" done.\"\n\n R = DF - DFint\n return DFint, R", "def conjugate(x):\n if len(list(x.size())) == 2:\n z = torch.zeros(2, x.size()[1], dtype=torch.double, device=x.device)\n z[0] = x[0]\n z[1] = -x[1]\n\n if len(list(x.size())) == 3:\n z = torch.zeros(\n 2, x.size()[2], x.size()[1], dtype=torch.double, device=x.device\n )\n z[0] = torch.transpose(x[0], 0, 1)\n z[1] = -torch.transpose(x[1], 0, 1)\n\n return z", "def solve(self):\n ...", "def compute_jacobian(self):\n \n d = len(self.theta)\n n,p = self.b.shape\n \n if not self.quiet:\n print \"Running jacobian computation.\"\n print \"D will be a {}x{}x{} array\".format(p,n,d)\n \n if self.x is None:\n raise ValueError('Can not compute Jacobian. 
self.x is None.')\n \n #print \"n={},n={}\".format(n,d);\n \n D = numpy.zeros((p,n,d))\n \n \n for k in range(d):\n A_k, b_k = self.get_diff_A_b(k)\n \n for i in range(p):\n D[i,:,k] = - self.solver.backsolve(A_k.dot(self.x[:,i]) - b_k[:,i])\n \n return D", "def construct_linear_system(self):\n N=self.grid.Ncells()\n Nbc = len(self.dirichlet_bcs)\n self.Ncalc=Ncalc = N - Nbc\n\n # map cells to forced values\n dirichlet = dict( [ (c,v) for c,v,xy in self.dirichlet_bcs])\n\n self.is_calc_c = is_calc_c = np.ones(N,np.bool8)\n for c,v,xy in self.dirichlet_bcs:\n is_calc_c[c] = False\n\n # is_calc_c[self.c_mask] = False\n\n # c_map is indexed by real cell indices, and returns the matrix index\n c_map = self.c_map = np.zeros(N,np.int32)\n self.c_map[is_calc_c] = np.arange(Ncalc)\n\n dzc=self.dzc\n dzf=self.dzf\n area_c=self.area_c\n\n meth='coo' # 'dok'\n if meth == 'dok':\n A=sparse.dok_matrix((Ncalc,Ncalc),np.float64)\n else:\n # construct the matrix from a sequence of indices and values\n ij=[]\n values=[] # successive value for the same i.j will be summed\n \n b = np.zeros(Ncalc,np.float64)\n flux_per_gradient_j = -self.K_j * self.l_j * dzf / self.d_j * self.dt\n\n self.grid.edge_to_cells() # makes sure that edges['cells'] exists.\n \n for j in range(self.grid.Nedges()):\n e = self.grid.edges[j]\n ic1,ic2 = e['cells']\n \n if ic1<0 or ic2<0 or e['deleted']:\n continue # boundary edge, or deleted edge\n \n flux_per_gradient=flux_per_gradient_j[j]\n \n # this is the desired operation:\n # Cdiff[ic1] -= flux_per_gradient / (An[ic1]*dzc) * (C[ic2] - C[ic1])\n # Cdiff[ic2] += flux_per_gradient / (An[ic2]*dzc) * (C[ic2] - C[ic1])\n # Where Cdiff is row, C is col\n\n if is_calc_c[ic1] and is_calc_c[ic2]:\n mic2 = c_map[ic2]\n mic1 = c_map[ic1]\n v1=flux_per_gradient / (area_c[ic1]*dzc[ic1])\n v2=flux_per_gradient / (area_c[ic2]*dzc[ic2])\n \n if meth == 'dok':\n A[mic1,mic2] -= v1\n A[mic1,mic1] += v1\n A[mic2,mic2] += v2\n A[mic2,mic1] -= v2\n else:\n ij.append( (mic1,mic2) ) ; values.append(-v1)\n ij.append( (mic1,mic1) ) ; values.append(v1)\n ij.append( (mic2,mic2) ) ; values.append(v1)\n ij.append( (mic2,mic1) ) ; values.append(-v1)\n \n elif not ( is_calc_c[ic1] or is_calc_c[ic2] ):\n # both are dirichlet, so nothing to do\n pass\n elif not is_calc_c[ic2]:\n mic1 = c_map[ic1]\n v=flux_per_gradient / (self.area_c[ic1]*dzc[ic1])\n if meth == 'dok':\n A[mic1,mic1] += v\n else:\n ij.append( (mic1,mic1) )\n values.append(v)\n\n # roughly\n # A[1,1]*x[1] + A[1,2]*x[2] + ... = b[1]\n # but we already know x[2],\n # A[1,1]*x[1] + ... 
= b[1] - A[1,2]*x[2]\n # so flip the sign, multiply by known dirichlet value, and\n # add to the RHS\n b[mic1] += flux_per_gradient / (area_c[ic1]*dzc[ic1]) * dirichlet[ic2]\n else: # not is_calc_c[c1]\n mic2 = c_map[ic2]\n # A[mic2,mic2] += flux_per_gradient / (area_c[ic2]*dzc[ic2])\n # A[mic2,mic1] -= flux_per_gradient / (area_c[ic2]*dzc[ic2])\n\n # A[mic2,mic2]*x[2] + A[mic2,mic1]*x[1] = b[2]\n # ...\n # A[mic2,mic2]*x[2] - flux_per_gradient / (area_c[ic2]*dzc[ic2])*x[1] = b[2]\n # ...\n # A[mic2,mic2]*x[2] = b[2] + flux_per_gradient / (area_c[ic2]*dzc[ic2])*x[1]\n v=flux_per_gradient / (area_c[ic2]*dzc[ic2])\n if meth == 'dok':\n A[mic2,mic2] += v\n else:\n ij.append( (mic2,mic2) )\n values.append(v)\n b[mic2] += flux_per_gradient / (area_c[ic2]*dzc[ic2]) * dirichlet[ic1]\n\n # Used to test 'is not 0:' but modern python complains\n if isinstance(self.alpha,np.ndarray): \n for c in range(N):\n if self.is_calc_c[c]:\n mic=self.c_map[c]\n v=self.alpha[c]*self.dt\n if meth == 'dok':\n A[mic,mic] -= v\n else:\n ij.append( (mic,mic) )\n values.append(-v)\n\n # Flux boundary conditions:\n for ic,value,xy in self.neumann_bcs:\n mic=c_map[ic]\n # make mass/time into concentration/step\n # arrived at minus sign by trial and error.\n # 2023-08-04: there was a bug here that used ic2 instead of ic.\n b[mic] -= value/(area_c[ic]*dzc[ic]) * self.dt\n\n if meth == 'dok':\n self.A = sparse.coo_matrix(A)\n else:\n ijs=np.array(ij,dtype=np.int32)\n data=np.array(values,dtype=np.float64)\n A=sparse.coo_matrix( (data, (ijs[:,0],ijs[:,1]) ), shape=(Ncalc,Ncalc) )\n self.A=A\n \n # report scale to get a sense of whether dt is too large\n Ascale = A.diagonal().min()\n log.debug(\"Ascale is %s\"%Ascale)\n\n self.b = b", "def optimize(self):\n\n self.logger.info(\"Solving with Dynamic Slope Scaling Procedure in Julia :\")\n optimization_start = time.time()\n\n # 1. 
Preprocess for old network graph\n if self.old_network_graph is not None:\n\n # DSSP on old network\n old_network_obj = sum(list(nx.get_node_attributes(self.old_network_graph, config.BUILDING_CONSUMPTION_KEY).values()))-1e-5\n try:\n self.check_infeasibility(self.old_network_graph, old_network_obj)\n except DHCOptimizerException as e:\n e.data = \"Invalid existing network: \" + e.data\n raise e\n\n flows, obj_val = self.optimize_with_dssp_julia(self.old_network_graph, old_network_obj, set())\n self.logger.info(\"Optimization phase time: %.2fs\" % (time.time() - optimization_start))\n solution_old_graph = self.build_solution_graph(self.old_network_graph, flows)\n\n if self.modify_old_network:\n\n # Add max capacity on old edges\n self.old_capacity = deepcopy(flows)\n old_buildings = list(nx.get_node_attributes(self.old_network_graph, config.BUILDING_CONSUMPTION_KEY).values())\n for key in flows:\n if (key[1],key[0],0) not in self.old_capacity and key[1] not in old_buildings:\n self.old_capacity[(key[1],key[0],0)] = self.old_capacity[key]\n\n # Add Imaginary edges\n for edge in self.old_capacity:\n if self.optimization_graph.has_edge(*edge):\n\n # add nodes\n if not self.optimization_graph.has_node(config.IM_PREFIX+edge[0]):\n self.optimization_graph.add_node(config.IM_PREFIX+edge[0])\n self.optimization_graph.nodes[config.IM_PREFIX+edge[0]][config.GPD_GEO_KEY] = \\\n self.optimization_graph.nodes[edge[0]][config.GPD_GEO_KEY]\n if not self.optimization_graph.has_node(config.IM_PREFIX+edge[1]):\n self.optimization_graph.add_node(config.IM_PREFIX+edge[1])\n self.optimization_graph.nodes[config.IM_PREFIX+edge[1]][config.GPD_GEO_KEY] = \\\n self.optimization_graph.nodes[edge[1]][config.GPD_GEO_KEY]\n # add edges\n if not self.optimization_graph.has_edge(edge[0],config.IM_PREFIX+edge[0]):\n self.optimization_graph.add_edge(edge[0],config.IM_PREFIX+edge[0])\n if not self.optimization_graph.has_edge(config.IM_PREFIX+edge[0],config.IM_PREFIX+edge[1]):\n self.optimization_graph.add_edge(config.IM_PREFIX+edge[0],config.IM_PREFIX+edge[1])\n if not self.optimization_graph.has_edge(config.IM_PREFIX+edge[1],edge[1]):\n self.optimization_graph.add_edge(config.IM_PREFIX+edge[1],edge[1])\n\n # put cost\n self.optimization_graph.edges[(config.IM_PREFIX+edge[0],config.IM_PREFIX+edge[1],0)][config.EDGE_COST_KEY] = \\\n self.optimization_graph.edges[(edge[0],edge[1],0)][config.EDGE_COST_KEY]\n self.optimization_graph.edges[(edge[0],edge[1],0)][config.EDGE_COST_KEY] = 1e-5\n self.optimization_graph.edges[(edge[0],config.IM_PREFIX+edge[0],0)][config.EDGE_COST_KEY] = 1e-5\n self.optimization_graph.edges[(config.IM_PREFIX+edge[1],edge[1],0)][config.EDGE_COST_KEY] = 1e-5\n\n else:\n # if we don't modify the old network, we have to change the capacity of the supplies\n already_consummed = {}\n for edge in solution_old_graph.edges():\n if solution_old_graph.nodes[edge[0]].get(config.NODE_TYPE_KEY) == config.SUPPLY_NODE_TYPE:\n already_consummed[edge[0]] = already_consummed.get(edge[0], 0) + \\\n solution_old_graph.edges[edge][config.SOLUTION_POWER_FLOW_KEY]\n for source in already_consummed:\n if already_consummed[source] <= self.optimization_graph.nodes[source][config.SUPPLY_POWER_CAPACITY_KEY]:\n self.optimization_graph.nodes[source][config.SUPPLY_POWER_CAPACITY_KEY] -= already_consummed[source]\n self.network_objective -= already_consummed[source]\n else:\n self.network_objective -= self.optimization_graph.nodes[source][config.SUPPLY_POWER_CAPACITY_KEY]\n 
self.optimization_graph.nodes[source][config.SUPPLY_POWER_CAPACITY_KEY] = 0\n\n # Remove edges from old network\n edges_to_remove = set()\n for e in self.optimization_graph.edges():\n if self.old_network_graph.has_edge(*e) or self.old_network_graph.has_edge(e[1],e[0]):\n edges_to_remove.add(e)\n self.optimization_graph.remove_edges_from(edges_to_remove)\n\n # Remove isolated buildings of optimization graph\n isolated_to_remove = set()\n for e in self.old_network_graph.edges():\n if e[0] in self.old_network_graph.nodes() and \\\n self.optimization_graph.nodes[e[1]].get(config.NODE_TYPE_KEY) == config.BUILDING_NODE_TYPE:\n isolated_to_remove.add(e)\n self.optimization_graph.remove_edges_from(isolated_to_remove)\n\n # Remove buildings from old network\n for n, data in self.old_network_graph.nodes(data=True):\n if data.get(config.NODE_TYPE_KEY) == config.BUILDING_NODE_TYPE:\n self.optimization_graph.remove_node(n)\n\n # Re-link sources\n sources = set()\n for n, data in self.optimization_graph.nodes(data=True):\n if data.get(config.NODE_TYPE_KEY) == config.SUPPLY_NODE_TYPE:\n sources.add(n)\n source_graph = self.optimization_graph.subgraph(sources).copy()\n self.optimization_graph.remove_nodes_from(sources)\n gnx.remove_isolates(self.optimization_graph)\n node_filter = lambda n: self.optimization_graph.nodes.get(n,{}).get(config.NODE_TYPE_KEY) != config.BUILDING_NODE_TYPE\n gnx.spatial_points_merge(self.optimization_graph, source_graph.nodes_to_gdf(), node_filter=node_filter, inplace=True)\n\n # fill missing information\n gnx.fill_edges_missing_geometry_attributes(self.optimization_graph)\n gnx.fill_length_attribute(self.optimization_graph, config.EDGE_LENGTH_KEY, only_missing=True)\n gnx.fill_length_attribute(self.optimization_graph, config.EDGE_COST_KEY, only_missing=True)\n for e in self.optimization_graph.edges(keys=True):\n self.optimization_graph.edges[e][config.LEASTCOST_COEF_KEY] = \\\n self.optimization_graph.edges[e].get(config.LEASTCOST_COEF_KEY,0)\n\n\n\n # 2. Process the DSSP on optimization graph\n self.check_is_ready()\n self.check_infeasibility(self.optimization_graph, self.network_objective)\n\n if self.old_network_graph is not None and self.modify_old_network:\n old_buildings = set(nx.get_node_attributes(self.old_network_graph, config.BUILDING_CONSUMPTION_KEY).keys())\n else:\n old_buildings = set()\n flows, obj_val = self.optimize_with_dssp_julia(self.optimization_graph, self.network_objective, old_buildings,postprocess= (not self.modify_old_network))\n self.logger.info(\"Optimization phase time: %.2fs\" % (time.time() - optimization_start))\n self.solution_graph = self.build_solution_graph(self.optimization_graph, flows, self.connected)\n\n # 3. 
Postprocess for old network graph\n if self.old_network_graph is not None:\n \n if self.modify_old_network:\n # Put the right supply capacity and cost\n for edge in self.old_capacity:\n if self.solution_graph.has_edge(edge[0],edge[1]):\n self.solution_graph.edges[(edge[0],edge[1])][config.EDGE_COST_KEY] = \\\n self.optimization_graph.edges[(config.IM_PREFIX+edge[0],config.IM_PREFIX+edge[1],0)][config.EDGE_COST_KEY]\n \n # Remove imaginary edges\n imaginary_nodes_to_remove = set()\n nodes_to_relabel = {}\n for edge in self.solution_graph.edges():\n if str(edge[0]).startswith(config.IM_PREFIX) and str(edge[1]).startswith(config.IM_PREFIX):\n real_edge = edge[0][len(config.IM_PREFIX):],edge[1][len(config.IM_PREFIX):]\n self.old_capacity[(real_edge[0], real_edge[1], 0)] = pd.np.inf\n self.old_capacity[(real_edge[1], real_edge[0], 0)] = pd.np.inf\n if not self.solution_graph.has_edge(*real_edge):\n for i in range(2):\n nodes_to_relabel[edge[i]] = real_edge[i]\n else:\n self.solution_graph.edges[real_edge[0],real_edge[1]][config.SOLUTION_POWER_FLOW_KEY] += \\\n self.solution_graph.edges[edge].get(config.SOLUTION_POWER_FLOW_KEY,0)\n imaginary_nodes_to_remove.add(edge[0])\n imaginary_nodes_to_remove.add(edge[1])\n elif str(edge[0]).startswith(config.IM_PREFIX):\n imaginary_nodes_to_remove.add(edge[0])\n elif str(edge[1]).startswith(config.IM_PREFIX):\n imaginary_nodes_to_remove.add(edge[1])\n\n nx.relabel_nodes(self.solution_graph, nodes_to_relabel, copy=False)\n self.solution_graph.remove_nodes_from(list(imaginary_nodes_to_remove))\n for node in nodes_to_relabel.values():\n if self.solution_graph.has_edge(node, node):\n self.solution_graph.remove_edge(node, node)\n\n else:\n for source in nx.get_node_attributes(self.solution_graph, config.SUPPLY_POWER_CAPACITY_KEY):\n self.solution_graph.nodes[source][config.SUPPLY_POWER_CAPACITY_KEY] += already_consummed.get(source,0)\n self.optimization_graph.nodes[source][config.SUPPLY_POWER_CAPACITY_KEY] += already_consummed.get(source,0)\n\n return flows, obj_val", "def cg_solve_jax(A,\n b,\n x_0=None,\n cg_iters=10,\n cg_residual_tol=1e-20,\n damping=1e-4):\n x = jnp.zeros_like(b) if x_0 is None else x_0\n if x_0 is not None:\n hvp_x0 = jnp.dot(A, x)\n\n r = b.copy() if x_0 is None else b-hvp_x0\n p = r.copy()\n rdotr = p.dot(r)\n\n for i in range(cg_iters):\n hvp_p = jnp.dot(A, p)\n z = hvp_p\n\n v = rdotr / p.dot(z)\n x += v * p\n r -= v * z\n\n s = r\n newrdotr = s.dot(r)\n mu = newrdotr / rdotr\n\n p = s + mu * p\n rdotr = newrdotr\n\n if rdotr < cg_residual_tol:\n break\n return x", "def equation(self):\n mat = np.empty((self.nunknowns, self.model.neq))\n rhs = np.zeros(self.nunknowns) # Needs to be initialized to zero\n for icp in range(self.ncp):\n istart = icp * self.nlayers\n ieq = 0\n for e in self.model.elementlist:\n if e.nunknowns > 0:\n qx, qy = e.disvecinflayers(self.xc[icp], self.yc[icp], self.layers)\n mat[istart:istart + self.nlayers, ieq:ieq + e.nunknowns] = \\\n qx * self.cosnorm[icp] + qy * self.sinnorm[icp] - self.resfac[:, np.newaxis] * \\\n (e.potinflayers(self.xcin[icp], self.ycin[icp], self.layers, aq=self.aq) / self.aq.Tcol[\n self.layers] - \\\n e.potinflayers(self.xcout[icp], self.ycout[icp], self.layers, aq=self.aq) / self.aq.Tcol[\n self.layers])\n ieq += e.nunknowns\n else:\n qx, qy = e.disveclayers(self.xc[icp], self.yc[icp], self.layers)\n rhs[istart:istart + self.nlayers] -= qx * self.cosnorm[icp] + qy * self.sinnorm[icp] + self.resfac * \\\n (e.potentiallayers(self.xcin[icp], self.ycin[icp], self.layers,\n aq=self.aq) / 
self.aq.T[self.layers] -\n e.potentiallayers(self.xcout[icp], self.ycout[icp],\n self.layers, aq=self.aq) / self.aq.T[\n self.layers])\n return mat, rhs", "def calculateElementCoefficients(self):\n #\n #get u,grad(u), and grad(u)Xgrad(w) at the quadrature points\n #\n for cj in range(self.nc):\n self.u[cj].getValues(self.q[('v',cj)],\n self.q[('u',cj)])\n if self.q.has_key(('grad(u)',cj)):\n self.u[cj].getGradientValues(self.q[('grad(v)',cj)],\n self.q[('grad(u)',cj)])\n #\n #get functions of (t,x,u) at the quadrature points\n #\n self.coefficients.evaluate(self.timeIntegration.t,self.q)\n log(\"Coefficients on element\",level=10,data=self.q)\n #\n # time integration is handled directly in ELLAM weak approximation, don't have a hook for\n # doing that via a time integration object (could if it were a direct Lagrange Galerkin formulation I believe)\n # however, need to set time integration's m_tmp if use that anywhere\n #if self.timeTerm:\n # self.timeIntegration.calculateElementCoefficients(self.q)\n\n #todo eventually can add nonlinear potential here\n\n #cek and mwf need to go through this section to clean up, some of next two blocks could go to calcQuad\n #\n #todo need non-diagonal dependence?\n for ci in range(self.nc):\n cfemIntegrals.calculateCFLADR(self.elementEffectiveDiametersArray,\n self.q[('dm',ci,ci)],\n self.q[('df',ci,ci)],#could just be velocity\n self.q[('cfl',ci)])", "def electrical_jacobian(self, state, u_in, omega, *_):\n pass", "def jac_res(self, params, **kwargs):\n e = kwargs.get(\"e\", self.problem.data_e)\n\n jac = self.jacobian.eval(params, **kwargs)\n return - jac / e[:, None]", "def solve_l1(y, A_fun, AT_fun, lambda_l1, reshape_img_fun, show_img_progress=False, alpha=0.2, max_iter=100, solver_tol=1e-6):\n\n\n obj_lss = np.zeros(max_iter)\n x_zs = np.zeros(max_iter)\n u_norms = np.zeros(max_iter)\n times = np.zeros(max_iter)\n\n ATy = AT_fun(y)\n x_shape = ATy.shape\n d = np.prod(x_shape)\n\n def A_cgs_fun(x):\n x = np.reshape(x, x_shape, order='F')\n y = AT_fun(A_fun(x)) + alpha * x\n return vec(y)\n A_cgs = LinearOperator((d,d), matvec=A_cgs_fun, dtype='float')\n\n def compute_p_inv_A(b, z0):\n (z,info) = sp.sparse.linalg.cgs(A_cgs, vec(b), x0=vec(z0), tol=1e-3, maxiter=100)\n if info > 0:\n print('cgs convergence to tolerance not achieved')\n elif info <0:\n print('cgs gets illegal input or breakdown')\n z = np.reshape(z, x_shape, order='F')\n return z\n\n\n def A_cgs_fun_init(x):\n x = np.reshape(x, x_shape, order='F')\n y = AT_fun(A_fun(x))\n return vec(y)\n A_cgs_init = LinearOperator((d,d), matvec=A_cgs_fun_init, dtype='float')\n\n def compute_init(b, z0):\n (z,info) = sp.sparse.linalg.cgs(A_cgs_init, vec(b), x0=vec(z0), tol=1e-2)\n if info > 0:\n print('cgs convergence to tolerance not achieved')\n elif info <0:\n print('cgs gets illegal input or breakdown')\n z = np.reshape(z, x_shape, order='F')\n return z\n\n # initialize z and u\n z = compute_init(ATy, ATy)\n u = np.zeros(x_shape)\n\n\n plot_normalozer = matplotlib.colors.Normalize(vmin=0.0, vmax=1.0, clip=True)\n\n\n start_time = timeit.default_timer()\n\n for iter in range(max_iter):\n\n # x-update\n net_input = z+u\n Wzu, wbook = wavelet_transform(net_input)\n q = soft_threshold(Wzu, lambda_l1/alpha)\n x = inverse_wavelet_transform(q, wbook, x_shape)\n x = np.reshape(x, x_shape)\n\n # z-update\n b = ATy + alpha * (x - u)\n z = compute_p_inv_A(b, z)\n\n # u-update\n u += z - x;\n\n if show_img_progress == True:\n\n fig = plt.figure('current_sol')\n plt.gcf().clear()\n 
fig.canvas.set_window_title('iter %d' % iter)\n plt.subplot(1,3,1)\n plt.imshow(reshape_img_fun(np.clip(x, 0.0, 1.0)), interpolation='nearest', norm=plot_normalozer)\n plt.title('x')\n plt.subplot(1,3,2)\n plt.imshow(reshape_img_fun(np.clip(z, 0.0, 1.0)), interpolation='nearest', norm=plot_normalozer)\n plt.title('z')\n plt.subplot(1,3,3)\n plt.imshow(reshape_img_fun(np.clip(net_input, 0.0, 1.0)), interpolation='nearest', norm=plot_normalozer)\n plt.title('netin')\n plt.pause(0.00001)\n\n\n obj_ls = 0.5 * np.sum(np.square(y - A_fun(x)))\n x_z = np.sqrt(np.mean(np.square(x-z)))\n u_norm = np.sqrt(np.mean(np.square(u)))\n\n print('iter = %d: obj_ls = %.3e |x-z| = %.3e u_norm = %.3e' % (iter, obj_ls, x_z, u_norm))\n\n\n obj_lss[iter] = obj_ls\n x_zs[iter] = x_z\n u_norms[iter] = u_norm\n times[iter] = timeit.default_timer() - start_time\n\n if x_z < solver_tol:\n break\n\n infos = {'obj_lss': obj_lss, 'x_zs': x_zs, 'u_norms': u_norms,\n 'times': times, 'alpha':alpha, 'lambda_l1':lambda_l1,\n 'max_iter':max_iter, 'solver_tol':solver_tol}\n\n\n return (x, z, u, infos)", "def conjgradient(x, p, gprev, gnew):\r\n gnew = np.array(gnew)[np.newaxis]\r\n gprev = np.array(gprev)[np.newaxis]\r\n gnew = gnew.T\r\n gprev = gprev.T\r\n beta = (gnew.T)@gnew/((gprev.T)@gprev)\r\n gnew = gnew.flatten()\r\n beta = beta.flatten()\r\n p = -gnew + beta*p\r\n return p", "def linear_conj_grad_r(quad_mult_fun, rhs, preconditioner, inputs, givens):\n # all params having the same dimensionality as rhs\n x = [ shared(numpy.zeros(p.get_value().shape,dtype=theano.config.floatX)) for p in rhs ]\n\n # conjugated gradients of all params having the same dimensionality as rhs\n direction = [ shared(numpy.zeros(p.get_value().shape,dtype=theano.config.floatX)) for p in rhs ]\n \n # stopping criterion\n old_residual_norm_squared = shared(numpy.zeros((),dtype=theano.config.floatX))\n Ax = quad_mult_fun(x)\n residual = [ b-a for a,b in zip(Ax, rhs)]\n residual_norm_squared = sum([ T.sum(T.sqr(r)/p) for (r,p) in zip(residual, preconditioner) ])\n\n curvature = sum( [ T.sum(a*b) for a,b in zip( quad_mult_fun(direction), direction) ])\n\n obj_fun = 0.5*sum( [T.sum( (a+b)*c ) for a,b,c in zip(residual, rhs, x)] )\n\n # line search to compute alpha_i\n alpha = residual_norm_squared / curvature\n\n # update params using alpha_i and d_i\n update_x = function(inputs,\n obj_fun,\n updates = [(a, a+alpha*d) for (a,d) in zip(x,direction)],\n givens = givens,\n on_unused_input='warn')\n\n # get new conjugated direction d_i\n beta = residual_norm_squared / old_residual_norm_squared\n iteration_updates = [ (d, r/p+beta*d) for (d,r,p) in zip(direction,residual,preconditioner) ]\n iteration_updates.append( (old_residual_norm_squared, residual_norm_squared) )\n update_direction = function(inputs, updates = iteration_updates,\n givens = givens,\n on_unused_input='warn')\n\n # init d_0\n init_updates = [ (d,r/p) for d,r,p in zip(direction, residual, preconditioner) ]\n init_updates.append( (old_residual_norm_squared, residual_norm_squared) )\n init = function(inputs, updates = init_updates, givens = givens,\n on_unused_input='warn')\n\n return (x, update_x, update_direction, init)", "def jacobi_solver(self, mat, rhs):\n x = np.zeros_like(rhs)\n for it_count in range(self.iterations_number):\n x_new = np.zeros_like(x)\n if self.verbose > 1:\n print(\"Iteration {0}: {1}\".format(it_count, x))\n for i in range(mat.shape[0]):\n s1 = np.dot(mat[i, :i], x[:i])\n s2 = np.dot(mat[i, i + 1:], x[i + 1:])\n x_new[i] = (rhs[i] - s1 - s2) / mat[i, i]\n if 
np.allclose(x, x_new, rtol=1e-8):\n break\n x = x_new\n return x", "def _cg(A, b, x0=None, tol=1.e-10, maxiter=1000):\n n = b.size\n assert A.n == n\n assert A.m == n\n b_norm = np.linalg.norm(b)\n\n # Jacobi pre-conditioner\n kvec = A.diag\n # For diag elem < 1e-6 we keep 1e-6.\n kvec = np.where(kvec > 1.e-6, kvec, 1.e-6)\n\n # Initial guess\n if x0 is None:\n x = np.zeros(n)\n else:\n x = x0\n\n r = b - A.dot(x)\n w = r/kvec\n\n p = np.zeros(n)\n beta = 0.0\n rho = np.dot(r, w)\n k = 0\n\n # Following C. T. Kelley\n while (np.sqrt(abs(rho)) > tol*b_norm) and (k < maxiter):\n p = w + beta*p\n z = A.dot(p)\n alpha = rho/np.dot(p, z)\n r = r - alpha*z\n w = r/kvec\n rhoold = rho\n rho = np.dot(r, w)\n x = x + alpha*p\n beta = rho/rhoold\n #err = np.linalg.norm(A.dot(x) - b) # absolute accuracy - not used\n k += 1\n err = np.linalg.norm(A.dot(x) - b)\n return x, err", "def run(self, C, p0 = None):\n global algorithm \n algorithm = AdaptiveMM(self.g, C, p0 = p0, lambda0 = 2000)\n solve()", "def JacobiSolve(A,b,tol=1.0e-6,max_iterations=100,LOUD=False):\n [Nrow, Ncol] = A.shape\n assert Nrow == Ncol\n N = Nrow\n converged = False\n iteration = 1\n x = np.random.rand(N) #random initial guess \n x_new = np.zeros(N)\n while not(converged):\n x = x_new.copy() #replace old value\n x_new *= 0 #reset x_new\n for row in range(N):\n x_new[row] = b[row]\n for column in range(N):\n if column != row:\n x_new[row] -= A[row,column]*x[column]\n x_new[row] /= A[row,row]\n relative_change = np.linalg.norm(x_new-x)/np.linalg.norm(x_new)\n if (LOUD):\n print(\"Iteration\",iteration,\": Relative Change =\",relative_change)\n if (relative_change < tol) or (iteration >= max_iterations):\n converged = True\n iteration += 1\n return x_new", "def _concentration(num, step, hl_a, hl_e, doses, return_diff=False):\n k_a = np.log(2) / hl_a\n k_e = np.log(2) / hl_e\n mat = np.float64([[-k_a, k_a, 0], [0, -k_e, k_e], [0, 0, 0]])\n mat_step = expm(mat * step)\n solution = np.zeros((num, 3))\n if return_diff:\n mat_tangent = np.copy(mat)\n diff = np.zeros(num)\n try:\n indexed_doses = {int(round(offset / step)): dose for offset, dose in doses.items()}\n except ZeroDivisionError:\n indexed_doses = {0: sum(doses.values())}\n for i in range(num):\n if i:\n solution[i] = mat_step.T @ solution[i-1]\n if i in indexed_doses:\n solution[i, 0] += indexed_doses[i]\n if return_diff:\n diff[i] = mat_tangent[0, 1] * solution[0, 0]\n mat_tangent[...] = mat_tangent @ mat_step\n if return_diff:\n return solution[:, 1], diff\n return solution[:, 1]", "def jacobian(self, v):\n from scipy.special import erf, erfcx\n def integrand(u_arr):\n \"\"\"Integrand of self-consistency equation\"\"\"\n integrand_all = erfcx(-u_arr)\n #integrand_all = np.zeros(u_arr.shape)\n #u_mask = u_arr < -4.0\n #u = u_arr[u_mask]\n #integrand_all[u_mask] = -1. / np.sqrt(np.pi) * (1.0 / u - 1.0 / (2.0 * u**3) + \n #3.0 / (4.0 * u**5) - \n #15.0 / (8.0 * u**7))\n #integrand_all[~u_mask] = np.exp(u_arr[~u_mask]**2) * (1. + erf(u_arr[~u_mask]))\n return integrand_all\n\n\n mu_v = self.mu(v)\n sd_v = self.sd(v)\n low = (self.V_r - mu_v) / sd_v # reduced resting potential\n up = (self.theta - mu_v) / sd_v # reduced threshold\n f_low = integrand(low)\n f_up = integrand(up)\n jac_mat_1 = self.tau_m * 1e-3 * np.sqrt(np.pi) * self.mat_mu\n jac_mat_2 = self.tau_m * 1e-3 * np.sqrt(np.pi) * self.mat_var / (2. * sd_v**2)\n\n jac_T = np.diag(1. 
/ v**2) - \\\n jac_mat_1.T * (f_up - f_low) + \\\n jac_mat_2.T * (f_up * up - f_low * low)\n return jac_T.T", "def jacobian(self, c):\n\n raise NotImplementedError", "def JacobiSolve_Short(A,b,tol=1.0e-6,max_iterations=100,LOUD=False):\n [Nrow, Ncol] = A.shape\n assert Nrow == Ncol\n N = Nrow\n converged = False\n iteration = 1\n x = np.random.rand(N) #random initial guess \n x_new = np.zeros(N)\n while not(converged):\n x = x_new.copy() #replace old value\n x_new *= 0 #reset x_new\n #update is (b - whole row * x + diagonal part * x)/diagonal\n x_new = (b - np.dot(A,x)+ A.diagonal()*x)/A.diagonal()\n relative_change = np.linalg.norm(x_new-x)/np.linalg.norm(x_new)\n if (LOUD):\n print(\"Iteration\",iteration,\": Relative Change =\",relative_change)\n if (relative_change < tol) or (iteration >= max_iterations):\n converged = True\n iteration += 1\n return x_new", "def jacobian(self,var,g=None):\n if (g==None):g=self.g\n jac=np.zeros([self.n+1,self.n])\n for i in range(self.n):\n for j in range(self.n):\n if(i==j): jac[i][j]=2.*(var[i]+1.)-g*np.sum([self.XXZ.Z(i,k) for k in range(self.n) if k!=i])\n else: jac[i][j]=g*self.XXZ.Z(i,j)\n for i in range(self.n):\n jac[self.n][i]=1.\n return jac", "def create_exercise_example_16_1():\n G = np.array([[8, 2], [2, 2]])\n c = np.array([2, 3])\n\n a1 = np.array([1, -1], dtype=np.float64)\n a2 = np.array([1, 1], dtype=np.float64)\n a3 = np.array([1, 0], dtype=np.float64)\n constraints = np.array([\n LinearConstraint(LinearCallable(a=a1, b=0), equation_type=EquationType.GE),\n LinearConstraint(LinearCallable(a=a2, b=4), equation_type=EquationType.LE),\n LinearConstraint(LinearCallable(a=a3, b=3), equation_type=EquationType.LE)\n ])\n\n solution = np.array([1/6, -10/6], dtype=np.float64)\n\n return QuadraticProblem(G=G, c=c, n=2, constraints=constraints, x0=None, solution=solution)", "def conj(z):", "def efSolver2(self):\n dx = self.dh[0] # dx\n dy = self.dh[1] # dy\n dz = self.dh[2] # dz\n \n \"\"\"\n for i in np.arange(0, self.ni):\n for j in np.arange(0, self.nj):\n for k in np.arange(0, self.nk):\n \"\"\"\n\n ##x-component#\n #if i==0: \n #x-component#\n \"\"\"\n if i==0: \n # forward\n self.ef[i][j][k][0] = -(-3*self.phi[i][j][k]+\\\n 4*self.phi[i+1][j][k]-\\\n self.phi[i+2][j][k])/(2*dx)\n \"\"\"\n \n # forward\n self.ef[0,0:self.nj,0:self.nk,0] = -(-3*self.phi[0,0:self.nj,0:self.nk]+\\\n 4*self.phi[1,0:self.nj,0:self.nk]-\\\n self.phi[2,0:self.nj,0:self.nk])/(2*dx)\n \n #elif i==self.ni-1: \n \"\"\"\n elif i==self.ni-1: \n # backward\n self.ef[i][j][k][0] = -(self.phi[i-2][j][k]-\\\n 4*self.phi[i-1][j][k]+\\\n 3*self.phi[i][j][k])/(2*dx)\n \"\"\" \n # backward\n self.ef[self.ni-1,0:self.nj,0:self.nk,0] = -(self.phi[self.ni-3,0:self.nj,0:self.nk]-\\\n 4*self.phi[self.ni-2,0:self.nj,0:self.nk]+\\\n 3*self.phi[self.ni-1,0:self.nj,0:self.nk])/(2*dx)\n \"\"\"\n else: \n #central\n self.ef[i][j][k][0] = -(self.phi[i+1][j][k] - \\\n self.phi[i-1][j][k])/(2*dx)\n \"\"\" \n #central\n self.ef[1:self.ni-1,0:self.nj,0:self.nk,0] = -(self.phi[2:self.ni,0:self.nj,0:self.nk] - \\\n self.phi[0:self.ni-2,0:self.nj,0:self.nk])/(2*dx)\n\n\n #y-component\n #if j==0:\n \"\"\"\n if j==0:\n self.ef[i][j][k][1] = -(-3*self.phi[i][j][k] + \\\n 4*self.phi[i][j+1][k]-\\\n self.phi[i][j+2][k])/(2*dy)\n \n \"\"\"\n self.ef[0:self.ni,0,0:self.nk,1] = -(-3*self.phi[0:self.ni,0,0:self.nk] + \\\n 4*self.phi[0:self.ni,1,0:self.nk]-\\\n self.phi[0:self.ni,2,0:self.nk])/(2*dy)\n #elif j==self.nj-1:\n \"\"\"\n elif j==self.nj-1:\n self.ef[i][j][k][1] = -(self.phi[i][j-2][k] - \\\n 
4*self.phi[i][j-1][k] +\\\n 3*self.phi[i][j][k])/(2*dy)\n \n \"\"\"\n self.ef[0:self.ni,self.nj-1,0:self.nk,1] = -(self.phi[0:self.ni,self.nj-3,0:self.nk] - \\\n 4*self.phi[0:self.ni,self.nj-2,0:self.nk] +\\\n 3*self.phi[0:self.ni,self.nj-1,0:self.nk])/(2*dy)\n #else:\n \"\"\"\n else:\n self.ef[i][j][k][1] = -(self.phi[i][j+1][k] - \\\n self.phi[i][j-1][k])/(2*dy)\n\n \"\"\"\n self.ef[0:self.ni,1:self.nj-1,0:self.nk,1] = -(self.phi[0:self.ni,2:self.nj,0:self.nk] - \\\n self.phi[0:self.ni,0:self.nj-2,0:self.nk])/(2*dy)\n\n #z-component\n '''\n if k==0:\n self.ef[i][j][k][2] = -(-3*self.phi[i][j][k] + \\\n 4*self.phi[i][j][k+1]-\n self.phi[i][j][k+2])/(2*dz)\n \n '''\n #z-component\n #if k==0:\n self.ef[0:self.ni,0:self.nj,0,2] = -(-3*self.phi[0:self.ni,0:self.nj,0] + \\\n 4*self.phi[0:self.ni,0:self.nj,1]-\n self.phi[0:self.ni,0:self.nj,2])/(2*dz)\n\n \"\"\"\n elif k==self.nk-1:\n self.ef[i][j][k][2] = -(self.phi[i][j][k-2] - \\\n 4*self.phi[i][j][k-1] + \\\n 3*self.phi[i][j][k])/(2*dz)\n \"\"\"\n \n #elif k==self.nk-1:\n self.ef[0:self.ni,0:self.nj,self.nk-1,2] = -(self.phi[0:self.ni,0:self.nj,self.nk-3] - \\\n 4*self.phi[0:self.ni,0:self.nj,self.nk-2] + \\\n 3*self.phi[0:self.ni,0:self.nj,self.nk-1])/(2*dz) \n \"\"\"\n else:\n self.ef[i][j][k][2] = -(self.phi[i][j][k+1] - \\\n self.phi[i][j][k-1])/(2*dz)\n \"\"\"\n #else:\n self.ef[0:self.ni,0:self.nj,1:self.nk-1,2] = -(self.phi[0:self.ni,0:self.nj,2:self.nk] - \\\n self.phi[0:self.ni,0:self.nj,0:self.nk-2])/(2*dz)", "def conjgrad(A, b, x=None, tol=1e-6, on_iteration=None):\n if x is None:\n x = Matrix2D.zeros(*b.shape)\n\n r = b - A.dot(x);\n p = r;\n residual_old = r.T.dot(r)\n\n iteration = 0\n while True:\n iteration += 1\n Ap = A.dot(p);\n\n alpha = residual_old / (p.T.dot(Ap))\n x = x + p * alpha\n r = r - Ap * alpha\n\n if on_iteration is not None:\n on_iteration(iteration, x, r)\n\n residual_new = r.T.dot(r)\n if math.sqrt(residual_new) < tol:\n break\n\n p = r + p * (residual_new / residual_old)\n\n residual_old = residual_new\n\n return x", "def solve_return_conv(self, x_0, dual_x_0):\n # Sanitize the inputs\n if type(x_0) is not np.ndarray or type(dual_x_0) is not np.ndarray:\n x_0 = np.array(x_0)\n dual_x_0 = np.array(dual_x_0)\n # Make sure that the arrays are column vectors\n x_0 = x_0.reshape(-1, 1)\n dual_x_0 = dual_x_0.reshape(-1, 1)\n\n print (\"Starting SQP minimization...\")\n [x, dual_x, exit_info] = self.globalized_sqp(x_0, dual_x_0)\n convergence_criteria = exit_info['val']\n\n print (exit_info['msg'])\n print (\"Exiting with ||grad[L]|| = {0:e}\".format(convergence_criteria))\n print (\"x = {0}\".format(x.reshape(-1)))\n print (\"dual_x = {0}\".format(dual_x.reshape(-1)))\n\n return [x, dual_x, convergence_criteria]", "def calc_jacobian_numerical(model, x, dim, device, eps=1e-6):\n\n # set to eval mode but remember original state\n in_training: bool = model.training\n model.eval() # otherwise we will get 0 gradients\n\n # clone input to avoid problems\n x = x.clone().requires_grad_(True)\n\n # init jacobian\n J = torch.zeros(dim, x.shape[1])\n\n # iterate over input dims and perturb\n for j in range(dim):\n delta = torch.zeros(dim).to(device)\n delta[j] = eps\n J[:, j] = (model(x + delta) - model(x)).abs().mean(0) / (2 * eps)\n\n # reset to original state\n if in_training is True:\n model.train()\n\n return J", "def main_function(self):\n self.ana_cont_probl = cont.AnalyticContinuationProblem(im_axis=self.input_data.mats,\n im_data=self.input_data.value.real,\n re_axis=self.realgrid.grid,\n 
kernel_mode='freq_bosonic')\n model = np.ones_like(self.realgrid.grid)\n model /= np.trapz(model, self.realgrid.grid)\n\n preblur, bw = self.get_preblur()\n\n sol = self.ana_cont_probl.solve(method='maxent_svd',\n optimizer='newton',\n alpha_determination='chi2kink',\n model=model,\n stdev=self.input_data.error,\n interactive=False, alpha_start=1e10, alpha_end=1e-3,\n preblur=preblur, blur_width=bw)\n\n inp_str = 'atom {}, orb {}, spin {}, blur {}: '.format(self.input_data.atom,\n self.input_data.orbital,\n self.input_data.spin,\n bw)\n all_chis = np.isfinite(np.array([s.chi2 for s in sol[1]]))\n res_str = 'alpha_opt={:3.2f}, chi2(alpha_opt)={:3.2f}, min(chi2)={:3.2f}'.format(\n sol[0].alpha, sol[0].chi2, np.amin(all_chis)\n )\n self.text_output.append(inp_str + res_str)\n alphas = [s.alpha for s in sol[1]]\n chis = [s.chi2 for s in sol[1]]\n\n self.output_data.update(self.realgrid.grid, sol[0].A_opt, self.input_data)\n\n fig, ax = plt.subplots(ncols=2, nrows=2, figsize=(11.75, 8.25)) # A4 paper size\n ax[0, 0].loglog(alphas, chis, marker='s', color='black')\n ax[0, 0].loglog(sol[0].alpha, sol[0].chi2, marker='*', color='red', markersize=15)\n ax[0, 0].set_xlabel(r'$\\alpha$')\n ax[0, 0].set_ylabel(r'$\\chi^2(\\alpha)$')\n\n ax[1, 0].plot(self.realgrid.grid, sol[0].A_opt)\n ax[1, 0].set_xlabel(r'$\\omega$')\n ax[1, 0].set_ylabel('spectrum')\n\n ax[0, 1].plot(self.input_data.mats, self.input_data.value.real,\n color='blue', ls=':', marker='x', markersize=5,\n label='Re[data]')\n ax[0, 1].plot(self.input_data.mats, self.input_data.value.imag,\n color='green', ls=':', marker='+', markersize=5,\n label='Im[data]')\n ax[0, 1].plot(self.input_data.mats, sol[0].backtransform.real,\n ls='--', color='gray', label='Re[fit]')\n ax[0, 1].plot(self.input_data.mats, sol[0].backtransform.imag,\n color='gray', label='Im[fit]')\n ax[0, 1].set_xlabel(r'$\\nu_n$')\n ax[0, 1].set_ylabel(self.input_data.data_type)\n ax[0, 1].legend()\n\n ax[1, 1].plot(self.input_data.mats, (self.input_data.value - sol[0].backtransform).real,\n ls='--', label='real part')\n ax[1, 1].plot(self.input_data.mats, (self.input_data.value - sol[0].backtransform).imag,\n label='imaginary part')\n ax[1, 1].set_xlabel(r'$\\nu_n$')\n ax[1, 1].set_ylabel('data $-$ fit')\n ax[1, 1].legend()\n plt.tight_layout()\n plt.show()", "def convolved_j1(self, _x, delta_x):\n return (\n j1(_x-0.5*delta_x) +\n 4.*j1(_x) +\n j1(_x+0.5*delta_x)\n )/6.", "def build_jacobian(l_comp, R_comp, l_vect, R_vect, B_vect):\r\n l_len = numpy.sqrt((l_vect * l_vect).sum(-1))\r\n R_len = numpy.sqrt((R_vect * R_vect).sum(-1))\r\n B_len = numpy.sqrt((B_vect * B_vect).sum(-1))\r\n # Empty 3x3 jacobian matrix\r\n jacob = numpy.zeros((B_vect.shape[-1], B_vect.shape[-1]), B_vect.dtype)\r\n\r\n # This is in the space with a standard basis along the \"l\", \"R\" and \"B\" axes\r\n jacob[1, 2] = -B_len / R_len\r\n jacob[2, 0] = l_comp\r\n jacob[2, 1] = R_comp\r\n\r\n # Transform the Jacobian to main space\r\n xform = numpy.stack((\r\n l_vect / l_len,\r\n R_vect / R_len,\r\n B_vect / B_len\r\n )).T\r\n xform_inv = numpy.linalg.inv(xform)\r\n return numpy.matmul(xform, numpy.matmul(jacob.T, xform_inv)).T", "def cal_ja(Y,t,voltage_clamp_func,voltage_clamp_params):\n v = voltage_clamp_func(t,voltage_clamp_params)\n m = Y[0]\n \n tfa = 1.\n ki = 0.001 # (mM)\n \n cao = 2.5 # Davidson (mM)\n \" To do: make cai variable as an input like voltage \"\n cai = 1.e-4 # (mM) Roughly values (100 nM) from Intracellular calcium regulation among subpopulations of rat dorsal root ganglion 
neurons by Lu, Zhang, Gold 2007\n \n celsius = 37.\n \n def alpha(v):\n return 15.69*(81.5 - v)/(np.exp((-1.0*v+81.5)/10.0)-1.0)\n def beta(v):\n return 0.29*np.exp(-v/10.86)\n def KTF(celsius):\n return ((25./293.15)*(celsius + 273.15))\n def efun(z):\n return np.array([1 - i/2 if i < 1e-4 else i/(np.exp(i)-1) for i in z])\n def calc_ghk(v, cai, cao): \n f = KTF(celsius)/2\n nu = v/f\n return -f*(1. - (cai/cao)*np.exp(nu))*efun(nu)\n\n a = alpha(v)\n b = beta(v)\n tau = 1./(tfa*(a + b))\n minf = a/(a+b)\n dm = (minf - m)/tau\n \n \"\"\" Calculating the current \n # h gate\n h2 = ki/(ki+cai)\n gcalbar = 0.003\n ghk = calc_ghk(v,cai,cao)\n ical = gcalbar*m*m*h2*ghk\n \"\"\"\n return [dm]", "def CG(A, b, x0, eps=0.01, imax=50):\n i = 0\n x = x0\n # residue\n r = b - A @ x\n # step in the direction of residue\n d = r\n # initial delta^2\n delta_new = np.dot(r,r)\n delta_0 = delta_new\n while i < i_max and delta_new > eps**2 * delta_0:\n alpha = delta_new / np.einsum('i,ij,j', d,A,d)\n x += alpha * d\n # correct for floating point error at some point\n # not useful for high tolerance but good to keep\n # in mind\n if i % 50 == 0:\n r = b - A@x\n else:\n r -= alpha*q\n delta_old = delta_new\n delta_new = np.dot(r, r)\n beta = delta_new / delta_old\n d = r + beta*d\n i += 1\n return x", "def _calc_C(self, lambdify=True):\n\n C = None\n C_func = None\n # check to see if we have our term saved in file\n C, C_func = self._load_from_file('C', lambdify)\n\n if C is None and C_func is None:\n # if no saved file was loaded, generate function\n print('Generating centrifugal and Coriolis compensation function')\n\n # first get the inertia matrix\n M = self._calc_M(lambdify=False)\n\n # C_{kj} = sum_i c_{ijk}(q) \\dot{q}_i\n # c_{ijk} = 1/2 * sum_i (\\frac{\\partial M_{kj}}{\\partial q_j} +\n # \\frac{\\partial M_{ki}}{\\partial q_j} - \\frac{\\partial M_{ij}}\n # {\\partial q_k})\n C = sp.zeros(self.N_JOINTS, self.N_JOINTS)\n for kk in range(self.N_JOINTS):\n for jj in range(self.N_JOINTS):\n for ii in range(self.N_JOINTS):\n dMkjdqi = M[kk, jj].diff(self.q[ii])\n dMkidqj = M[kk, ii].diff(self.q[jj])\n dMijdqk = M[ii, jj].diff(self.q[kk])\n C[kk, jj] += .5 * (dMkjdqi + dMkidqj - dMijdqk) * self.dq[ii]\n C[kk, jj] = C[kk, jj]\n C = sp.Matrix(C)\n\n # save to file\n abr_control.utils.os_utils.makedirs(\n '%s/C' % self.config_folder)\n cloudpickle.dump(C, open(\n '%s/C/C' % self.config_folder, 'wb'))\n\n if lambdify is False:\n # if should return expression not function\n return C\n\n if C_func is None:\n C_func = self._generate_and_save_function(\n filename='C', expression=C,\n parameters=self.q+self.dq)\n return C_func", "def conjgrad(A, Y, sigma, X0=None, maxiters=None, tol=1e-2):\n Y, m, n, d, matrix_in = _format_system(A, Y)\n\n damp = m * sigma**2\n rtol = tol * np.sqrt(m)\n G = lambda x: np.dot(A.T, np.dot(A, x)) + damp * x\n B = np.dot(A.T, Y)\n\n X = np.zeros((n, d)) if X0 is None else np.array(X0).reshape((n, d))\n iters = -np.ones(d, dtype='int')\n for i in range(d):\n X[:, i], iters[i] = _conjgrad_iters(\n G, B[:, i], X[:, i], maxiters=maxiters, rtol=rtol)\n\n info = {'rmses': _rmses(A, X, Y), 'iterations': iters}\n return X if matrix_in else X.flatten(), info", "def conjugate(self):\r\n return self.__class__(self._real, -self._imag)" ]
[ "0.71288806", "0.65124166", "0.63342714", "0.62315416", "0.62032104", "0.61986536", "0.61747384", "0.6161469", "0.6153835", "0.61255676", "0.6096203", "0.6096203", "0.6096203", "0.6096203", "0.6096203", "0.6096203", "0.6096203", "0.6096203", "0.6096203", "0.6096203", "0.6096203", "0.6096203", "0.6096203", "0.6096203", "0.6096203", "0.6096203", "0.6070083", "0.6060576", "0.6049096", "0.60361534", "0.603449", "0.6012546", "0.5976163", "0.5962753", "0.59408355", "0.592894", "0.59030944", "0.58976907", "0.5889764", "0.58861953", "0.587643", "0.5860994", "0.58546585", "0.5831426", "0.58250254", "0.5823108", "0.58199966", "0.58015007", "0.57965213", "0.57862955", "0.57715493", "0.5765061", "0.57603085", "0.5760265", "0.5736814", "0.5725132", "0.57240635", "0.5723064", "0.5721334", "0.5708777", "0.5708774", "0.57069814", "0.5701637", "0.5683772", "0.56823707", "0.5678815", "0.5678777", "0.567796", "0.5675536", "0.56707656", "0.56689566", "0.5667877", "0.5665964", "0.56655383", "0.56601226", "0.5657126", "0.56477064", "0.5646668", "0.5645728", "0.5638424", "0.563831", "0.5634758", "0.5634694", "0.5629459", "0.56248045", "0.5610198", "0.56088257", "0.5597757", "0.5583302", "0.55823016", "0.5579536", "0.5560204", "0.55567753", "0.5553062", "0.55444354", "0.5540393", "0.5539869", "0.5539544", "0.5538989", "0.5538723", "0.55373603" ]
0.0
-1
_masked_edge mask the edges of the swath gap
def _masked_edge(var,xac): if np.any(xac>0): ind_gap = (xac==np.nanmin(xac[xac>0])) if ind_gap.size==var.size: if ind_gap.shape!=var.shape: ind_gap = ind_gap.transpose() var[ind_gap] = np.nan elif ind_gap.size==var.shape[1]: var[:,ind_gap] = np.nan if np.any(xac<0): ind_gap = (xac==np.nanmax(xac[xac<0])) if ind_gap.size==var.size: if ind_gap.shape!=var.shape: ind_gap = ind_gap.transpose() var[ind_gap] = np.nan elif ind_gap.size==var.shape[1]: var[:,ind_gap] = np.nan return var
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def subgraph_mask(self, size):\n init_matrix = np.random.randn(size,size)\n Tcs = csgraph.minimum_spanning_tree(init_matrix)\n mask_matrix = Tcs.toarray()\n return mask_matrix", "def mask(self):", "def mask(self):\n mask = np.zeros((self.height, self.width))\n pts = [\n np.array(anno).reshape(-1, 2).round().astype(int)\n for anno in self.segmentation\n ]\n mask = cv2.fillPoly(mask, pts, 1)\n return mask", "def mask(self):\n\n mask = np.zeros(shape=(self._info.height, self._info.width), dtype=np.uint8)\n\n self.draw(image=mask, color=constants.COLOR_WHITE_MONO)\n\n mask_with_border = np.pad(mask, 1, 'constant', constant_values=255)\n\n cv2.floodFill(image=mask,\n mask=mask_with_border,\n seedPoint=(int(self.middle_point[0]), int(self.middle_point[1])),\n newVal=constants.COLOR_WHITE_MONO)\n\n return mask", "def mask_border(self, left=3, right=3, top=3, bottom=3):\n self.MaskPrefix = 'b' + self.MaskPrefix #prepend 'b' for border\n print('Masking edge pixels: left={0}, right={1}, top={2}, bottom={3}'.format(left,right,top,bottom))\n for ig in self.Set:\n igram = self.load_ma(ig)\n igram[:top,:] = ma.masked\n igram[-bottom:,:] = ma.masked\n igram[:,:left] = ma.masked\n igram[:,-right:] = ma.masked\n mskFile = self.MaskPrefix + 'Mask_' + ig.Name[:-4]\n np.save(os.path.join(self.ProcDir, mskFile), igram.mask)\n print(mskFile)\n print('mask_border() complete: {0} interferograms'.format(self.Set.Nig))", "def __filterEdges(self):", "def clean_edges(self):", "def _build_mask(self, xg, yg):\n\n # 1. create mask based on meshes\n points = np.vstack((xg.flatten(), yg.flatten())).T\n\n # 2. extract edge points using el_pos\n edge_points = self.node[np.arange(16)]\n path = Path(edge_points, closed=False)\n mask = path.contains_points(points)\n\n return mask", "def _set_ghost_mask(self, rdd):\n tau, N, dom_mins, dom_maxs = self.tau, self.N, self.dom_mins, self.dom_maxs\n container_mins, container_maxs = self.container_mins, self.container_maxs\n buff_mins, buff_maxs = self.buff_mins, self.buff_maxs\n\n def ghost_map_wrapper(iterator): \n for arr in iterator: \n ghost_mask(arr, tau, N, container_mins, container_maxs, \n buff_mins, buff_maxs, dom_mins, dom_maxs)\n yield arr\n\n return rdd.mapPartitions(ghost_map_wrapper, preservesPartitioning=True)", "def shrink_mask(self):\n m = self._mask\n if m.ndim and not m.any():\n self._mask = nomask\n return self", "def frame_missing_mask(mask, kernel_shape, sym_upper=False, max_dist=None):\n if mask.dtype != bool:\n raise ValueError(\"Mask must contain boolean values\")\n if not sp.issparse(mask):\n raise ValueError(\"Mask must be a sparse matrix\")\n\n framed_mask = mask.copy()\n ms, ns = mask.shape\n mk, nk = kernel_shape\n if sym_upper and (max_dist is not None):\n # Remove diagonals further than scan distance in the input mask\n framed_mask = diag_trim(framed_mask, max_dist + max(nk, mk)).tocsr()\n max_m = max_dist + mk\n max_n = max_dist + nk\n else:\n max_m, max_n = ms, ns\n # Up and down margins initialized with zeros and filled as needed\n margin_1 = sp.csr_matrix((mk - 1, ns), dtype=bool)\n margin_2 = sp.csr_matrix((mk - 1, ns), dtype=bool)\n if sym_upper and (max_dist is not None):\n # Margin 1 (top) is in upper triangle -> fill missing up to scan dist\n margin_1[:, :max_n] = 1\n else:\n margin_1[:, :] = 1\n margin_2[:, :] = 1\n framed_mask = sp.vstack([margin_1, framed_mask, margin_2], format=\"csr\")\n\n # Left and right\n margin_1 = sp.csr_matrix((ms + 2 * (mk - 1), nk - 1), dtype=bool)\n margin_2 = sp.csr_matrix((ms + 2 * (mk - 1), nk - 1), 
dtype=bool)\n\n if sym_upper and (max_dist is not None):\n # Margin 2 (right) is in upper triangle-> fill missing up to scan dist\n margin_2[-(max_m + 1) :, :] = 1\n # Fill only the start of left margin for the top-left corner\n margin_1[: mk - 1, :] = 1\n else:\n margin_1[:, :] = 1\n margin_2[:, :] = 1\n framed_mask = sp.hstack([margin_1, framed_mask, margin_2], format=\"csr\")\n\n if sym_upper:\n # LIL format is much faster when changing sparsity\n framed_mask = framed_mask.tolil()\n # Add margin below diagonal\n big_k = max(nk, mk)\n dia_margins = np.ones(big_k)\n dia_offsets = np.arange(-1, -big_k-1, -1)\n framed_mask += sp.diags(\n dia_margins,\n dia_offsets,\n shape=framed_mask.shape,\n format=\"lil\",\n dtype=bool,\n )\n framed_mask = framed_mask.tocsr()\n return framed_mask", "def num_fill_edges(mask,i):\n n = mask.shape[0]\n nb = np.nonzero(mask[i, :])[0]\n clique_edges = nb.shape[0]*(nb.shape[0]-1)/2\n current_edges = mask[np.ix_(nb, nb)].sum()/2\n return clique_edges - current_edges", "def fill_sat_holes (data_mask, mask_value):\n\n value_sat = mask_value['saturated']\n value_satcon = mask_value['saturated-connected']\n mask_satcon = ((data_mask & value_sat == value_sat) |\n (data_mask & value_satcon == value_satcon))\n struct = np.ones((3,3), dtype=bool)\n mask_satcon = ndimage.binary_closing(mask_satcon, structure=struct)\n mask_satcon = ndimage.binary_fill_holes(mask_satcon, structure=struct)\n mask_satcon2add = (mask_satcon & (data_mask==0))\n data_mask[mask_satcon2add] = value_satcon", "def __generate_mask(self):\n mask = np.concatenate([np.ones(len(self.fixed[0])),\n np.zeros(self.num_points),\n np.ones(len(self.fixed[1]))])\n return mask", "def get_masked_scene(orig, mask, local_context_size = 80, dilation=False):\n orig_scene = orig.copy()\n mask_scene = mask.copy()\n orig_scene_no_mask = orig.copy()\n \n mask_info = np.where(mask_scene == 0) \n min_x = max(min(mask_info[0]) - local_context_size, 0)\n max_x = max(mask_info[0]) + local_context_size\n min_y = max(min(mask_info[1]) - local_context_size, 0)\n max_y = max(mask_info[1]) + local_context_size\n \n orig_scene = orig_scene[min_x:max_x,min_y:max_y]\n orig_scene_no_mask = orig_scene_no_mask[min_x:max_x,min_y:max_y]\n mask_scene = mask_scene[min_x:max_x,min_y:max_y]\n \n dialation_mask = np.zeros(mask_scene.shape) + 255\n \n if dilation:\n dialation_mask = cv2.dilate(255-mask_scene, np.ones((local_context_size,local_context_size)))\n \n #implot(dialation_mask)\n #plt.imshow(dialation_mask, 'gray')\n \n for x in range(mask_scene.shape[0]):\n for y in range(mask_scene.shape[1]):\n if mask_scene[x, y] == 0:\n orig_scene[x, y, :] = 0\n orig_scene_no_mask[x,y,:] = 0\n if dilation:\n if dialation_mask[x,y] == 0:\n orig_scene[x, y, :] = 0\n \n return orig_scene, mask_scene, orig_scene_no_mask, dialation_mask", "def mask_gradient(self, override=False):\n self.MaskPrefix = 'g' + self.MaskPrefix #append prefix 'g' for gradient\n print('applying gradient filter to remove edge effects and isolated unwrapping errors')\n # If a signal mask exists, use it to prevent np.gradient() from scrapping important data\n indSignal = np.zeros(self.Set.Size)\n if override:\n #manually created boolean array, 1=pixel containing known signal\n indSignal = np.load(override)\n\n for ig in self.Set:\n igram = self.load_ma(ig)\n Fx, Fy = np.gradient(phase) #locate pixels adjacent to NaNs\n Fx[indSignal] = 1\n Fy[indSignal] = 1\n igram[np.isnan(Fx)] = ma.masked\n igram[np.isnan(Fx)] = ma.masked\n mskFile = self.MaskPrefix + 'Mask_' + ig.Name[:-4]\n 
np.save(os.path.join(self.ProcDir, mskFile), igram.mask)\n print(mskFile)\n print('Done')", "def fill_vert(self, mask):\n im_floodfill = np.copy(mask)\n im_floodfill[im_floodfill!=self.vertebra_id] = 0\n im_floodfill[im_floodfill==self.vertebra_id] = 255\n im_floodfill_copy = np.copy(im_floodfill)\n # The size needs to be 2 pixels larger than the image.\n h, w = im_floodfill.shape[:2]\n mask4mask = np.zeros((h+2, w+2), np.uint8)\n # Floodfill from point (0, 0)\n cv2.floodFill(im_floodfill, mask4mask, (0,0), 255)\n # Invert floodfilled image\n im_floodfill_inv = cv2.bitwise_not(im_floodfill)\n # Combine the two images to get the foreground.\n im_floodfill_inv = im_floodfill_inv | im_floodfill_copy\n im_floodfill_inv[im_floodfill_inv==255] = self.vertebra_id\n mask_filled = mask | im_floodfill_inv\n return mask_filled", "def get_arrow_mask(self, frame):\n # Adapte contrast and put in grayscale\n img = skimage.exposure.equalize_adapthist(frame)\n img = skimage.color.rgb2gray(img)\n # # apply Otsu thresholding method\n thres = skimage.filters.threshold_otsu(img)\n mask = np.where(img < skimage.filters.threshold_otsu(img), True, False)\n # Morphological cleaning\n mask = skimage.morphology.binary_opening(mask, skimage.morphology.disk(5))\n # Remove corners of room\n mask = skimage.segmentation.flood_fill(mask, (0,0), 0)\n mask = skimage.segmentation.flood_fill(mask, (0,mask.shape[1]-1), 0)\n mask = skimage.segmentation.flood_fill(mask, (mask.shape[0]-1,0), 0)\n mask = skimage.segmentation.flood_fill(mask, (mask.shape[0]-1,mask.shape[1]-1), 0)\n\n return mask", "def remove_self_loops(edge_index: np.ndarray) -> np.ndarray:\n mask = []\n for i in range(edge_index.shape[1]):\n if edge_index[0][i] != edge_index[1][i]:\n # not a self-loop\n mask.append(i)\n return edge_index[:, mask]", "def Mask(self) -> int:", "def applymask(self,mask):\n self.spec[mask==0]=np.nan", "def getMask(self):\r\n mask = np.array(self.array, dtype=np.float32)\r\n mask[mask == 0] = np.nan\r\n return mask", "def remove_edge(self, edge: Edge) -> Edge:", "def mask2trimap(self, mask):\n fg_mask = (mask > 0).float()\n bg_mask = (mask < 0).float()\n trimap_width = getattr(self.opt, 'trimap_width', 20)\n trimap_width *= bg_mask.shape[-1] / self.opt.width\n trimap_width = int(trimap_width)\n bg_mask = cv2.erode(bg_mask.numpy(), kernel=np.ones((trimap_width, trimap_width)), iterations=1)\n bg_mask = torch.from_numpy(bg_mask)\n mask = fg_mask - bg_mask\n return mask", "def crop_missing(self):\n new_data = numpy.ma.copy(self.data)\n new_edges = list(self.bset.edges) # Mutable copy\n\n # Remove all-masked edge slices along all dimensions\n for axis in range(new_data.ndim):\n # Bring axis to front\n new_data = numpy.ma.swapaxes(new_data, 0, axis)\n\n # Find first slice to keep\n try:\n first = next(i for (i, mask) in\n enumerate(numpy.ma.getmaskarray(new_data))\n if not mask.all())\n new_data = new_data[first:]\n new_edges[axis] = new_edges[axis][first:]\n except StopIteration:\n pass\n\n # Find last slice to keep\n try:\n last = next(i for (i, mask) in\n enumerate(numpy.ma.getmaskarray(new_data)[::-1])\n if not mask.all())\n if last != 0:\n new_data = new_data[:-last]\n new_edges[axis] = new_edges[axis][:-last]\n except StopIteration:\n pass\n\n # Swap back axis\n new_data = numpy.ma.swapaxes(new_data, 0, axis)\n\n return type(self)(new_data, new_edges)", "def mask_region(self, ypos, xpos, r):\r\n for j, i in product(np.arange(ypos - r, ypos + r + 1), np.arange(xpos - r, xpos + 1 + r)): # Create square\r\n if (j - ypos) ** 2 + (i - 
xpos) ** 2 <= r ** 2 and 0 <= j<= self.shapes[0] - 1 and 0<= i <=self.shapes[1] - 1:\r\n j = int(j)\r\n i = int(i)\r\n self.masked[j, i] = 0", "def _compute_masked_hidden(self, hidden, mask):\r\n mask = mask.unsqueeze(-1).expand_as(hidden)\r\n hidden_masked = hidden[mask].contiguous().view(-1, hidden.size(-1))\r\n return hidden_masked", "def mask_neighbors(self, mask, rad=9, ptrn='r'):\n return um.mask_neighbors(mask, rad, ptrn)", "def unmold_mask(mask, bbox, image_shape):\n threshold = 0.5\n y1, x1, y2, x2 = bbox\n mask = resize(mask, (y2 - y1, x2 - x1))\n mask = np.where(mask >= threshold, 1, 0).astype(np.bool)\n\n # Put the mask in the right location.\n full_mask = np.zeros(image_shape[:2], dtype=np.bool)\n full_mask[y1:y2, x1:x2] = mask\n return full_mask", "def clean_mask(mask, background=0):\n kernels = [\n np.array([[ 1, -1, -1], [-1, 1, -1], [-1, -1, -1]]), # top left standalone pixel\n np.array([[-1, -1, 1], [-1, 1, -1], [-1, -1, -1]]), # top right standalone pixel\n np.array([[-1, -1, -1], [-1, 1, -1], [ 1, -1, -1]]), # bottom left standalone pixel\n np.array([[-1, -1, -1], [-1, 1, -1], [-1, -1, 1]]) # bottom right standalone pixel\n ]\n\n proc_masks = [cv2.morphologyEx(mask, cv2.MORPH_HITMISS, kernel).astype(np.bool) for kernel in kernels]\n\n for proc_mask in proc_masks:\n mask[proc_mask] = background\n return mask", "def __mask_region(self, img, vertices):\n\n mask = np.zeros_like(img) \n if len(img.shape) > 2:\n channel_count = img.shape[2] # i.e. 3 or 4 depending on your image\n ignore_mask_color = (255,) * channel_count\n else:\n ignore_mask_color = 255\n cv2.fillConvexPoly(mask, vertices, ignore_mask_color)\n masked_image = cv2.bitwise_and(img, mask)\n return masked_image", "def identity_mask_propagation(nx_node, nx_graph):\n input_masks = get_input_masks(nx_node, nx_graph)\n assert len(input_masks) == 1\n nx_node[\"input_masks\"] = input_masks\n nx_node[\"output_mask\"] = input_masks[0]", "def identity_mask_propagation(nx_node, nx_graph):\n input_masks = get_input_masks(nx_node, nx_graph)\n assert len(input_masks) == 1\n nx_node['input_masks'] = input_masks\n nx_node['output_mask'] = input_masks[0]", "def fix_generate_mask(self):\r\n mask = np.zeros((self.width, self.height), np.uint8)\r\n size = int((self.width + self.height) * 0.01)\r\n if self.width < 32 or self.height < 32:\r\n raise Exception(\"Width and Height of mask must be at least 64!\")\r\n i = 0\r\n for _ in range(1, int(0.3 * self.width)):\r\n index_xline = random.sample(range(0, self.width), int(0.5 * self.width))\r\n x1 = index_xline[i]\r\n i += 1\r\n thickness = 1\r\n cv2.line(mask, (0, x1), (self.height - 1, x1), 1, thickness)\r\n\r\n return 1 - mask", "def mask_infeasible(self):\n ns = len(self)-1\n # mask entries with i+j+k > ns\n for ii in range(len(self)):\n for jj in range(len(self)):\n for kk in range(len(self)):\n if ii+jj+kk > ns:\n self.mask[ii,jj,kk] = True\n \n return self", "def _mask(self) -> np.ndarray:\n mask = np.ones(self.limits, dtype=bool)\n for ax, shape, limit in zip(\n range(1, len(self.limits)), self.shape, self.limits[1:]\n ):\n ax_mask = np.arange(limit) < np.expand_dims(shape, 1)\n new_shape = np.ones(len(self.limits), dtype=int)\n new_shape[0], new_shape[ax] = self.limits[0], limit\n mask = mask & ax_mask.reshape(*new_shape)\n return mask", "def pad_edges(self, pad):\n weights=[]\n for dim, xy in zip([0, 1], [self.x, self.y]):\n xy0 = np.mean(xy)\n W = xy[-1]-xy[0]\n dist = np.abs(xy-xy0)\n wt=np.ones_like(dist)\n wt[ dist >= W/2 - pad] = 0\n weights += [wt]\n self.weight *= 
weights[0][:,None].dot(weights[1][None,:])", "def mask(self):\n return np.ones((self.size, self.size))", "def unmold_mask(mask, bbox, image_shape):\n threshold = 0.5\n y1, x1, y2, x2 = bbox\n mask = scipy.misc.imresize(\n mask, (y2 - y1, x2 - x1), interp='bilinear').astype(np.float32) / 255.0\n mask = np.where(mask >= threshold, 1, 0).astype(np.uint8)\n\n # Put the mask in the right location.\n full_mask = np.zeros(image_shape[:2], dtype=np.uint8)\n full_mask[y1:y2, x1:x2] = mask\n return full_mask", "def maskBorder(width,shape):\n assert isinstance(width,int), \"width has to be integer\"\n assert width>0, \"width has to be positive\"\n mask = np.zeros(shape,dtype=bool)\n mask[ :width , : ] = True\n mask[ -width: , : ] = True\n mask[ : , :width ] = True\n mask[ : , -width: ] = True\n return mask", "def watershed(mask, img, plotImage = False, kernelSize = None):\n imgCopy = img.copy()\n maskCopy = np.array(mask.copy(), dtype=np.uint8)\n \n if kernelSize is None:\n kernelSize = 2\n\n # Finding sure foreground area\n #dist_transform = cv2.distanceTransform(mask, cv2.DIST_L2, 5)\n #ret, sure_fg = cv2.threshold(dist_transform,0.3*dist_transform.max(),255,0) #change the second argument to change the sensitivity \n maskClosed = skimage.morphology.closing(np.array(maskCopy, dtype=np.uint8))\n maskClosed = skimage.morphology.closing(np.array(maskClosed, dtype=np.uint8))\n kernel = np.ones((kernelSize,kernelSize), np.uint8)\n # maskCopy = img_as_bool(maskCopy)\n sure_fg = cv2.erode(maskClosed, kernel, iterations = 2) ###\n sure_fg = skimage.morphology.closing(np.array(sure_fg, dtype=np.uint8))\n # kernel = np.ones((2,2), np.uint8)\n # sure_fg = binary_closing(sure_fg, kernel)\n \n # sure background area\n #kernel = np.ones((5, 5), np.uint8)\n #sure_bg = cv2.dilate(mask, kernel, iterations = 1)\n sure_fg_bool = 1 - img_as_bool(sure_fg)\n # sure_bg = np.uint8(1 - morphology.medial_axis(sure_fg_bool)) ### \n sure_bg = np.uint8(1 - morphology.skeletonize(sure_fg_bool))\n sure_bg[0, :] = 1\n sure_bg[-1, :] = 1\n sure_bg[:, 0] = 1\n sure_bg[:, -1] = 1\n \n # Finding unknown region\n sure_fg = np.uint8(sure_fg)\n unknown = cv2.subtract(sure_bg, sure_fg)\n \n if plotImage:\n plt.figure()\n plt.imshow(sure_fg)\n plt.title(\"Inner Marker\")\n plt.figure()\n plt.imshow(sure_bg)\n plt.title(\"Outer Marker\")\n plt.figure()\n plt.imshow(unknown)\n plt.title(\"Unknown\")\n \n # Marker labelling\n ret, markers = cv2.connectedComponents(sure_fg)\n\n # Add one to all labels so that sure background is not 0, but 1\n markers = markers+1\n\n # Now, mark the region of unknown with zero\n markers[unknown==1] = 0\n \n if plotImage:\n plt.figure()\n plt.imshow(markers, cmap='jet')\n plt.title(\"Markers\")\n \n # Do watershed\n markers = cv2.watershed(imgCopy, markers)\n \n imgCopy[markers == -1] = [0, 255 ,0]\n\n if plotImage:\n plt.figure()\n plt.imshow(markers,cmap='jet')\n plt.title(\"Mask\")\n plt.figure()\n plt.imshow(img)\n plt.title(\"Original Image\")\n plt.figure()\n plt.imshow(imgCopy)\n plt.title(\"Marked Image\")\n plt.show()\n\n return markers", "def trim_floating_solid(im):\n holes = find_disconnected_voxels(~im)\n im[holes] = True\n return im", "def _generate_mask(self):\r\n mask = np.zeros((self.width, self.height), np.uint8)\r\n size = int((self.width + self.height) * 0.01)\r\n if self.width < 32 or self.height < 32:\r\n raise Exception(\"Width and Height of mask must be at least 64!\")\r\n for _ in range(randint(1,int(0.5*self.width))):\r\n x1 = randint(0, self.width-1)\r\n thickness = 1\r\n 
cv2.line(mask, (0, x1),(self.height-1, x1), 1, thickness)\r\n return 1 - mask", "def _mask(self):\n if self.__mask is None:\n # need this to be *exactly* the numpy boolean False\n return nomask\n return self.__mask", "def edge_ground(X):\n gradient_x = img_conv(X, kernel_sobel_x)\n gradient_y = img_conv(X, kernel_sobel_x.transpose())\n mag = (gradient_x ** 2.0 + gradient_y ** 2.0) ** 0.5\n is_edge = mag > 1.0\n return is_edge.astype('f')", "def mask_image(image):\n pass", "def dilate(mask, forbidden):\n new_mask = np.copy(mask)\n # Shift right\n new_mask[:, 1:] |= mask[:, :-1]\n # Shift left\n new_mask[:, :-1] |= mask[:, 1:]\n # Shift up\n new_mask[1:, :] |= mask[:-1, :]\n # Shift down\n new_mask[:-1, :] |= mask[1:, :]\n\n # Shift up-right\n new_mask[1:, 1:] |= mask[:-1, :-1]\n # Shift down-right\n new_mask[:-1, 1:] |= mask[1:, :-1]\n # Shift down-left\n new_mask[:-1, :-1] |= mask[1:, 1:]\n # Shift up-left\n new_mask[1:, :-1] |= mask[:-1, 1:]\n\n new_mask[forbidden] = False\n\n return new_mask", "def mask_test_edges(adj, prop):\n # Remove diagonal elements\n adj = adj - sp.dia_matrix(\n (adj.diagonal()[np.newaxis, :], [0]), shape=adj.shape)\n adj.eliminate_zeros()\n # Check that diag is zero:\n assert np.diag(adj.todense()).sum() == 0\n\n adj_triu = sp.triu(adj)\n adj_tuple = sparse_to_tuple(adj_triu)\n edges = adj_tuple[0]\n edges_all = sparse_to_tuple(adj)[0]\n num_test = int(np.floor(edges.shape[0] * prop))\n\n all_edge_idx = range(edges.shape[0])\n np.random.shuffle(all_edge_idx)\n test_edge_idx = all_edge_idx[:num_test]\n test_edges = edges[test_edge_idx]\n train_edges = np.delete(edges, test_edge_idx, axis=0)\n\n def ismember(a, b, tol=5):\n rows_close = np.all(np.round(a - b[:, None], tol) == 0, axis=-1)\n return np.any(rows_close)\n\n test_edges_false = []\n while len(test_edges_false) < len(test_edges):\n idx_i = np.random.randint(0, adj.shape[0])\n idx_j = np.random.randint(0, adj.shape[0])\n if idx_i == idx_j:\n continue\n if ismember([idx_i, idx_j], edges_all):\n continue\n if test_edges_false:\n if ismember([idx_j, idx_i], np.array(test_edges_false)):\n continue\n if ismember([idx_i, idx_j], np.array(test_edges_false)):\n continue\n test_edges_false.append([idx_i, idx_j])\n\n assert ~ismember(test_edges_false, edges_all)\n assert ~ismember(test_edges, train_edges)\n\n data = np.ones(train_edges.shape[0])\n\n # Re-build adj matrix\n adj_train = sp.csr_matrix((data, (train_edges[:, 0], train_edges[:, 1])),\n shape=adj.shape)\n adj_train = adj_train + adj_train.T\n\n # NOTE: these edge lists only contain single direction of edge!\n num_nodes = adj.shape[0]\n test_mask = np.zeros((num_nodes, num_nodes))\n for i, j in test_edges:\n test_mask[i, j] = 1\n test_mask[j, i] = 1\n for i, j in test_edges_false:\n test_mask[i, j] = 1\n test_mask[j, i] = 1\n return adj_train, sparse_to_tuple(test_mask)", "def mask(self, mask):\n return MaskedDistribution(self, mask)", "def is_masked(self):\n return False", "def attention_mask(model, x):\n config = model.config\n input_mask = model.inputs[\"input_mask\"]\n final_mask = model.builder.customOp(opName=\"AttentionMask\",\n opVersion=1,\n domain=\"ai.graphcore\",\n inputs=[input_mask, x],\n attributes={\"dataType\": model.config.popart_dtype})[0]\n final_mask = model.detach(final_mask)\n return final_mask", "def remove_edges(self, node: NodeKey) -> Edge:", "def interface_endpoints_mask(cell_a, cell_b):\n dilated_a = binary_dilation(cell_a, selem=np.ones((3, 3)))\n dilated_b = binary_dilation(cell_b, selem=np.ones((3, 3)))\n edge_interface = 
np.logical_and(dilated_a, dilated_b)\n\n # Get outer-most edge of cell pair\n pair = np.logical_or(cell_a, cell_b)\n pair_dilated = binary_dilation(pair)\n pair_edge = np.logical_xor(pair_dilated, pair)\n\n # Find overlap of the edge masks\n corners_mask = np.logical_and(pair_edge, edge_interface)\n return corners_mask", "def find_resting_edge(sensor):\n resting = sensor.data[\"mask_a_resting\"]\n\n trigger_val = 0.5\n mask1 = (resting[:-1] < trigger_val) & (resting[1:] > trigger_val)\n mask2 = (resting[:-1] > trigger_val) & (resting[1:] < trigger_val)\n\n index_start_resting = np.flatnonzero(mask1)+1\n mask_start_resting = np.zeros(len(resting))\n mask_start_resting[index_start_resting] = 1\n\n sensor.data[\"index_start_resting\"] = index_start_resting\n sensor.data[\"mask_start_resting\"] = mask_start_resting\n\n index_stop_resting = np.flatnonzero(mask2)+1\n mask_stop_resting = np.zeros(len(resting))\n mask_stop_resting[index_stop_resting] = 1\n\n sensor.data[\"index_stop_resting\"] = index_stop_resting\n sensor.data[\"mask_stop_resting\"] = mask_stop_resting", "def test_odd(self):\n actual = cm.ring_mask((5, 5), 1, 2)\n expected = np.array([[False, False, True, False, False],\n [False, True, False, True, False],\n [True, False, False, False, True],\n [False, True, False, True, False],\n [False, False, True, False, False]])\n self.assertIsNone(np.testing.assert_array_equal(actual, expected))", "def generate_mask(self, thresh=50, b_ground=None):\n img = self.load_image()\n thresh = np.zeros(img.shape, \"uint8\")\n if b_ground is not None:\n img = img - b_ground\n thresh[img > 25] = 255\n mask = ndimage.morphology.binary_dilation(thresh).astype(\"uint8\")\n self.mask = 255*mask", "def shade_neighbours(x: int, y: int) -> None:\r\n if x > 0:\r\n safeboard[x-1, y] = 0\r\n if x < shape-1:\r\n safeboard[x+1, y] = 0\r\n if y > 0:\r\n safeboard[x, y-1] = 0\r\n if y < shape-1:\r\n safeboard[x, y+1] = 0\r\n safeboard[x, y] = 0", "def fill_blind_pores(im):\n holes = find_disconnected_voxels(im)\n im[holes] = False\n return im", "def close_mask_in(im_slice_2d, side):\n new_slice = im_slice_2d.copy()\n \n x_no_0, y_no_0 = np.nonzero(im_slice_2d)\n if len(x_no_0) == 0: return new_slice, new_slice\n #breakpoint()\n x1 = x_no_0.min() \n x2 = x_no_0.max()\n if side == \"l\":\n x_mid = x2; x_aux1 = x_mid - 9 + 1; x_aux2 = x2 + 1\n elif side == \"r\":\n x_mid = x1; x_aux2 = x_mid + 9; x_aux1 = x1\n \n y_mid = y_no_0[np.where(x_no_0==x_mid)[0]].min()\n y_min = y_no_0.min()\n \n # inferior line\n new_slice[x1:x2+1, y_min] = 1\n # medial line\n new_slice[x_mid, y_min:y_mid+1] = 1\n new_slice = binary_fill_holes(new_slice)\n # in_short array:\n other_slice = new_slice.copy() \n other_slice[x_aux1:x_aux2, :] = 0\n \n return new_slice, other_slice", "def __relax_edge(self, node):\n if node == str(self.get_start()):\n distance_to_parent = 0 # no parent for the starting point\n else:\n parent = self.path[node]\n distance_to_parent = self.distances[parent] + 1\n # try to relax the stretched edge\n if self.distances[node] > distance_to_parent:\n self.distances[node] = distance_to_parent", "def _remove_edges_mincut(self, operation_id: np.uint64, source_id: np.uint64,\n sink_id: np.uint64, source_coord: Sequence[int],\n sink_coord: Sequence[int],\n bb_offset: Tuple[int, int, int] = (120, 120, 12)\n ) -> Tuple[\n bool, # success\n Optional[Tuple[\n List[np.uint64], # new_roots\n List[bigtable.row.Row], # rows\n np.ndarray, # removed_edges\n datetime.datetime]]]: # timestamp\n\n time_start = time.time() # 
------------------------------------------\n\n bb_offset = np.array(list(bb_offset))\n source_coord = np.array(source_coord)\n sink_coord = np.array(sink_coord)\n\n # Decide a reasonable bounding box (NOT guaranteed to be successful!)\n coords = np.concatenate([source_coord[:, None],\n sink_coord[:, None]], axis=1).T\n bounding_box = [np.min(coords, axis=0), np.max(coords, axis=0)]\n\n bounding_box[0] -= bb_offset\n bounding_box[1] += bb_offset\n\n root_id_source = self.get_root(source_id)\n root_id_sink = self.get_root(source_id)\n\n # Verify that sink and source are from the same root object\n if root_id_source != root_id_sink:\n print(\"root(source) != root(sink)\")\n return False, None\n\n print(\"Get roots and check: %.3fms\" %\n ((time.time() - time_start) * 1000))\n time_start = time.time() # ------------------------------------------\n\n root_id = root_id_source\n\n # Get edges between local supervoxels\n n_chunks_affected = np.product((np.ceil(bounding_box[1] / self.chunk_size)).astype(np.int) -\n (np.floor(bounding_box[0] / self.chunk_size)).astype(np.int))\n print(\"Number of affected chunks: %d\" % n_chunks_affected)\n print(\"Bounding box:\", bounding_box)\n print(\"Bounding box padding:\", bb_offset)\n print(\"Atomic ids: %d - %d\" % (source_id, sink_id))\n print(\"Root id:\", root_id)\n\n edges, affs = self.get_subgraph(root_id, get_edges=True,\n bounding_box=bounding_box,\n bb_is_coordinate=True)\n\n print(\n \"Get edges and affs: %.3fms\" % ((time.time() - time_start) * 1000))\n time_start = time.time() # ------------------------------------------\n\n # Compute mincut\n atomic_edges = mincut.mincut(edges, affs, source_id, sink_id)\n\n print(\"Mincut: %.3fms\" % ((time.time() - time_start) * 1000))\n time_start = time.time() # ------------------------------------------\n\n if len(atomic_edges) == 0:\n print(\"WARNING: Mincut failed. 
Try again...\")\n return False, None\n\n # Check if any edge in the cutset is infinite (== between chunks)\n # We would prevent such a cut\n\n atomic_edges_flattened_view = atomic_edges.view(dtype='u8,u8')\n edges_flattened_view = edges.view(dtype='u8,u8')\n\n cutset_mask = np.in1d(edges_flattened_view, atomic_edges_flattened_view)\n if np.any(np.isinf(affs[cutset_mask])):\n print(\"inf in cutset\")\n return False, None\n\n # Remove edges\n success, result = self._remove_edges(operation_id, atomic_edges)\n\n if not success:\n print(\"remove edges failed\")\n return False, None\n\n new_roots, rows, time_stamp = result\n\n print(\"Remove edges: %.3fms\" % ((time.time() - time_start) * 1000))\n time_start = time.time() # ------------------------------------------\n\n return True, (new_roots, rows, atomic_edges, time_stamp)", "def add_mask_layer(self):\n return Masking(mask_value=self.mask_value, input_shape=(self.max_sequence_size, 1))", "def inflate_mask(mask):\n kernel = np.ones((12, 12), np.uint8)\n return cv2.dilate(mask, kernel, 1)", "def geometry_mask(self, gdf, all_touched=False, invert=False, **kwargs):\n gdf1 = gdf.copy()\n gdf1[\"mask\"] = np.full(gdf.index.size, (not invert), dtype=np.uint8)\n da_out = self.rasterize(\n gdf1,\n col_name=\"mask\",\n all_touched=all_touched,\n nodata=np.uint8(invert),\n **kwargs,\n )\n # remove nodata value before converting to boolean\n da_out.attrs.pop(\"_FillValue\", None)\n return da_out.astype(bool)", "def _source_mask(self, ilens):\n x_masks = make_non_pad_mask(ilens)\n return x_masks.unsqueeze(-2)", "def subsequent_mask(mask_size):\n mask_shape = (1, mask_size, mask_size)\n # Create a lower-triangle matrix at the primary diagonal (0th)\n # such that all the elements above the diagonal are 0.\n mask = np.tril(np.ones(mask_shape), k=0).astype('uint8')\n mask = torch.from_numpy(mask)\n return mask", "def get_mask(self, anno, img_info) -> np.ndarray:\n m = np.zeros((img_info[\"height\"], img_info[\"width\"]), dtype=np.float32)\n\n for obj in anno:\n if obj[\"iscrowd\"]:\n rle = pycocotools.mask.frPyObjects(obj[\"segmentation\"], img_info[\"height\"], img_info[\"width\"])\n mask = pycocotools.mask.decode(rle)\n if mask.shape != m.shape:\n logger.warning(f\"Mask shape {mask.shape} does not match image shape {m.shape} for image {img_info['file_name']}\")\n continue\n m += mask\n elif obj[\"num_keypoints\"] == 0:\n rles = pycocotools.mask.frPyObjects(obj[\"segmentation\"], img_info[\"height\"], img_info[\"width\"])\n for rle in rles:\n mask = pycocotools.mask.decode(rle)\n if mask.shape != m.shape:\n logger.warning(f\"Mask shape {mask.shape} does not match image shape {m.shape} for image {img_info['file_name']}\")\n continue\n\n m += mask\n\n return (m < 0.5).astype(np.float32)", "def _suppress_bg_dc(self):\n # mask for suppressing background/don't care classes\n suppress_mask = 1 - (self.classification_mask[0] + self.classification_mask[1])\n # Suppress bounding box mask\n for i in range(self.num_coords):\n self.bbox_mask[i] = np.multiply(self.bbox_mask[i], suppress_mask)\n # Suppress for depth mask\n self.depth_mask = np.multiply(self.depth_mask, suppress_mask)\n return suppress_mask", "def mask_button_press(self):\n\n self.segmentation_opacity = 1 - self.segmentation_opacity\n self.segmentation_image.setImage(self._model.segmentation_image, opacity=self.segmentation_opacity)", "def maskLowerLeftBorder(\n depth_image,\n background_mask,\n sigma=1, l_thresh=0, h_thresh=1000,\n axis_tol=0.1, hough_thresh_ratio=0.4,\n x_max_thresh=0.1, 
y_min_thresh=0.75, margin=10):\n\n # Find rudimentary edges in the image\n masked_image = depth_image.copy()\n masked_image[background_mask] = 0\n edge_image = feature.canny(\n masked_image,\n sigma=sigma,\n low_threshold=l_thresh,\n high_threshold=h_thresh\n )\n\n num_rows, num_cols = edge_image.shape\n\n x_mid = num_cols / 2\n y_mid = num_rows / 2\n x_max = x_max_thresh * num_cols\n y_min = y_min_thresh * num_rows\n\n hough_mask = np.zeros_like(edge_image, dtype=bool)\n\n # Detect lines using the Hough transform\n h, theta, d = transform.hough_line(edge_image)\n __, angles, dists = transform.hough_line_peaks(\n h, theta, d,\n threshold=hough_thresh_ratio * h.max()\n )\n\n # Filter Hough lines and mask the border if appropriate\n for angle, dist in zip(angles, dists):\n if geometry.axisAligned(angle, tol=axis_tol, axis='horizontal'):\n y = geometry.solveLine(angle, dist, x=x_mid)\n if y > y_min:\n hough_mask[int(y) - margin:, :] = True\n elif geometry.axisAligned(angle, tol=axis_tol, axis='vertical'):\n x = geometry.solveLine(angle, dist, y=y_mid)\n if x < x_max:\n hough_mask[:, :int(x) + margin] = True\n else:\n continue\n\n return hough_mask", "def mask_annular(m, bound):\n rmin, rmax, cmin, cmax = bound\n m[:rmin,:] = True\n m[rmax:,:] = True\n m[:,:cmin] = True\n m[:,cmax:] = True\n return m", "def get_mask(self, shape):\n h, w = shape[0:2]\n y, x = np.mgrid[:h, :w]\n points = np.transpose((x.ravel(), y.ravel()))\n\n mask = _nxutils_points_inside_poly(points, self.verts)\n #mask = nxutils.points_inside_poly(points, self.verts)\n return mask.reshape(h, w)", "def square_mask(ys_in_pad, ignore_id):\n ys_mask = (ys_in_pad != ignore_id).unsqueeze(-2)\n ymax = ys_mask.size(-1)\n ys_mask_tmp = ys_mask.transpose(1, 2).repeat(1, 1, ymax)\n ys_mask = ys_mask.repeat(1, ymax, 1) & ys_mask_tmp\n\n return ys_mask", "def morphological_dilation(masked_image, n): #n=3\r\n\tmask = np.isnan(masked_image)\r\n\ts = ndimage.morphology.generate_binary_structure(2, 1)\r\n\textended_mask = ndimage.binary_dilation(mask, structure=s, iterations=3).astype(mask.dtype)\r\n\treturn extended_mask\r\n\t#mask = np.isnan(masked_image)\r\n\t#idx = np.flatnonzero(mask)\r\n\t#expanded_idx = idx[:,None] + np.arange(1, n)\r\n\t#np.put(mask, expanded_idx, True, 'clip')\r\n\t#return mask\r", "def remove_border_vals(img, x: torch.Tensor, y: torch.Tensor, c: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: \r\n\r\n new_x = img.shape[3] - 8\r\n new_y = img.shape[2] - 8\r\n \r\n mask = x.ge(8) & x.le(new_x) & y.ge(8) & y.le(new_y)\r\n x = torch.masked_select(x, mask)\r\n y = torch.masked_select(y, mask)\r\n c = torch.masked_select(c, mask)\r\n\r\n return x, y, c", "def frame_fix_badpix_isolated(array, bpm_mask=None, sigma_clip=3, num_neig=5,\n size=5, protect_mask=0, cxy=None, mad=False, \n ignore_nan=True, verbose=True, full_output=False):\n if array.ndim != 2:\n raise TypeError('Array is not a 2d array or single frame')\n if size % 2 == 0:\n raise TypeError('Size of the median blur kernel must be an odd integer')\n\n if bpm_mask is not None:\n bpm_mask = bpm_mask.astype('bool')\n\n if verbose: start = time_ini()\n\n if num_neig > 0:\n neigh = True\n else:\n neigh = False\n\n frame = array.copy()\n if cxy is None:\n cy, cx = frame_center(frame)\n else:\n cx, cy = cxy\n \n if bpm_mask is None:\n ori_nan_mask = np.where(np.isnan(frame))\n ind = clip_array(frame, sigma_clip, sigma_clip, neighbor=neigh,\n num_neighbor=num_neig, mad=mad)\n bpm_mask = np.zeros_like(frame)\n bpm_mask[ind] = 1\n if ignore_nan:\n 
bpm_mask[ori_nan_mask] = 0\n if protect_mask:\n cir = disk((cy, cx), protect_mask, shape=bpm_mask.shape)\n bpm_mask[cir] = 0\n bpm_mask = bpm_mask.astype('bool')\n\n smoothed = median_filter(frame, size, mode='mirror')\n frame[np.where(bpm_mask)] = smoothed[np.where(bpm_mask)]\n array_out = frame\n count_bp = np.sum(bpm_mask)\n \n if verbose:\n msg = \"/nDone replacing {} bad pixels using the median of neighbors\"\n print(msg.format(count_bp))\n timing(start)\n \n if full_output:\n return array_out, bpm_mask\n else:\n return array_out", "def stability_mask(board, period=6, remove_agent=True):\n if remove_agent:\n board = board * ((board & CellTypes.agent) == 0)\n\n neighborhood = np.ones((3,3))\n alive = (board & CellTypes.alive) // CellTypes.alive\n neighbors = ndimage.convolve(alive, neighborhood, mode='wrap')\n max_neighbors = neighbors\n ever_alive = alive\n orig_board = board\n for _ in range(period):\n board = speedups.advance_board(board)\n alive = (board & CellTypes.alive) // CellTypes.alive\n neighbors = ndimage.convolve(alive, neighborhood, mode='wrap')\n ever_alive |= alive\n max_neighbors = np.maximum(max_neighbors, neighbors)\n is_boundary = (board & CellTypes.frozen > 0)\n is_boundary |= (ever_alive == 0) & (max_neighbors <= 2)\n labels, num_labels = speedups.wrapped_label(~is_boundary)\n mask = np.zeros(board.shape, dtype=bool)\n for idx in range(1, num_labels+1):\n region = labels == idx\n if (board[region] == orig_board[region]).all():\n mask |= region\n return mask", "def addMasking(self):\n self.abundance_df['masked'] = [False]*len(self.abundance_df.index)\n self.abundance_df['colour'] = ['undefined']*len(self.abundance_df.index)", "def mask(self):\n return self._mask", "def mask(self):\n return self._mask", "def mask(self):\n return self._mask", "def mask(self):\n return self._mask", "def create_logits_mask_by_first_edge_graph(edge_indexes, num_edge, nvec):\n # find first edge\n # adj matrix of graphs\n # adj_mat = nx.to_numpy_matrix(graph, nodelist=range(nvec))[None]\n\n # bs = adj_mats.shape[0]\n # total_mask = []\n max_edge = edge_indexes.shape[0]\n total_mask = np.zeros(shape=(1, max_edge), dtype=np.int8)\n\n\n # edges = edge_indexes[:num_edge]\n # edges = np.where(adj_mats[i, :, :] > 0)\n\n # max_edge = max(max_edge, num_edges)\n # mask = np.zeros(shape=(1, max_edge), dtype=np.int8)\n all_half_edges = edge_indexes[:num_edge // 2] # only use the directed edge (a->b), not (b->a) # location[np.where(location[:, 0] < location[:, 1])[0]]\n all_valid_edges = edge_indexes[:num_edge] # only use the directed edge (a->b), not (b->a) # location[np.where(location[:, 0] < location[:, 1])[0]]\n\n # restore adj matrix\n rawobs = np.zeros(shape=(nvec, nvec), dtype=np.int8)\n for edge in all_valid_edges:\n rawobs[edge[0], edge[1]] = 1 # edge is an ndarray (2,), we cannot index using rawobs[edge] (is array with shape (2, 15))\n#\n for idx_1, edge_1 in enumerate(all_half_edges):\n encoded_edge_1 = idx_1\n # check if they are one-hop connected\n\n mask = np.zeros(shape=(max_edge,), dtype=np.int8)\n for idx_2, edge_2 in enumerate(all_valid_edges):\n\n fail_cond = edge_2[0] in edge_1 or edge_2[1] in edge_1 or\\\n int(rawobs[edge_2[0], edge_1[0]]) + int(rawobs[edge_2[0], edge_1[1]]) + \\\n int(rawobs[edge_2[1], edge_1[0]]) + int(rawobs[edge_2[1], edge_1[1]]) > 0\n\n mask[idx_2] = not fail_cond\n\n total_mask[0, encoded_edge_1] = mask.any()\n\n\n return total_mask", "def perform_noise_removal(mask):\n trans1 = cv.dilate(mask, KERNEL, iterations=4)\n trans1 = cv.erode(trans1, KERNEL, 
iterations=5)\n return cv.dilate(trans1, KERNEL, iterations=7)", "def segmentGraph(self, edges):\r\n\r\n sorted_edges = sorted(edges)\r\n\r\n for i in range(self._num_edges):\r\n a = self._operation.find(sorted_edges[i][1])\r\n b = self._operation.find(sorted_edges[i][2])\r\n if a != b:\r\n if sorted_edges[i][0] <= self._thresh[a] and sorted_edges[i][0] <= self._thresh[b]:\r\n self._operation.join(a, b)\r\n a = self._operation.find(a)\r\n self._thresh[a] = sorted_edges[i][0] + self.threshold_fn(self._operation.size(a))\r\n\r\n for i in range(self._num_edges):\r\n a = self._operation.find(sorted_edges[i][1])\r\n b = self._operation.find(sorted_edges[i][2])\r\n\r\n if a != b and (self._operation.size(a) < self._minSize or self._operation.size(b) < self._minSize):\r\n self._operation.join(a, b)\r\n\r\n # num = self._operation.num_sets()\r\n # print(num)\r\n\r\n colors = []\r\n for i in range(self._num_vertices):\r\n b = np.random.randint(0, 256)\r\n g = np.random.randint(0, 256)\r\n r = np.random.randint(0, 256)\r\n colors.append([b, r, g])\r\n\r\n dim = self._image.shape\r\n dst = self._image.copy()\r\n\r\n for y in range(dim[0]):\r\n for x in range(dim[1]):\r\n temp = self._operation.find(y * dim[1] + x)\r\n dst[y, x] = colors[temp]\r\n\r\n plt.figure(2)\r\n plt.imshow(dst)\r\n plt.show()", "def _mask(self, map_):\n return None", "def land_unmasked(res='4x5', debug=False):\n from .GEOSChem_bpch import get_LWI_map # Kludge, use GEOS-Chem LWI\n\n # Create a np.ma mask\n if debug:\n print(('land_mask called for: ', res))\n m = np.ma.masked_not_equal(get_LWI_map(res=res), 1)\n if debug:\n print((mask, mask.shape))\n return m.mask", "def setMask(self, mask):\n try:\n self.mask = mask\n self.inds = na.nonzero(self.mask.flat)[0]\n #print \"length of self.inds\",len(self.inds)\n #print self.inds\n self.dim = self.mask.shape[::-1]\n #print self.mask.shape\n return True\n except Exception as error:\n print(\"failed in setMask\", error)", "def edge_filter(edge):\n # <= because self loops\n idx, jdx = edge\n return ((not graph.has_edge(idx, jdx) or is_multigraph) and\n (idx <= jdx or is_directed) and\n (idx != jdx or self_loops))", "def clean_edges(self):\n for from_node in self.all_nodes():\n for to_node in self.all_nodes():\n if from_node == to_node:\n continue\n dup = list(filter(lambda x: x.from_node == from_node and x.to_node == to_node, self.edges))\n if len(dup) > 1:\n for d in dup[1:]:\n self.edges.remove(d)", "def get_mask(self, img):\n raise NotImplementedError()", "def sanitize_mask(orig_x, orig_y, mask):\n contours, hierarchy = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)\n\n # Draw contours:\n cv2.drawContours(mask, contours, 0, (0, 255, 0), 2)\n # Calculate image moments of the detected contour\n num_objects = (len(contours))\n #threshold\n threshold = 3\n\n center_list = []\n # print(num_objects)\n if num_objects > 1:\n for item in range(num_objects):\n M = cv2.moments(contours[item])\n try:\n center_x = round(M['m10'] / M['m00'])\n center_y = round(M['m01'] / M['m00'])\n center_list.append([center_y , center_x ])\n except:\n pass\n\n # initialize retmask\n retmask = mask\n if num_objects > 1:\n for x, y in center_list:\n if orig_x - threshold <= x <= orig_x + threshold and orig_y - threshold <= y <= orig_y + threshold:\n pass\n else:\n def dfs_removal(px , py, mask):\n R = len(mask)\n C = len(mask[0])\n if mask[px][py ] != 255: \n return\n mask[px][py] = 0\n if 0 <= px - 1 and mask[px - 1][py ] == 255: dfs_removal(px - 1 , py , mask)\n if px + 1 < R and mask[px + 
1][py ] == 255: dfs_removal(px + 1 , py , mask)\n if 0 <= py - 1 and mask[px][py - 1] == 255: dfs_removal(px, py -1 , mask)\n if py + 1 < C and mask[px][py + 1] == 255: dfs_removal(px, py + 1 , mask)\n\n dfs_removal(x,y, mask)\n\n return retmask", "def test_differentiable__mask_connectivity(self):\n data = random_graph_data(5, 4, 3)\n print(data.size)", "def outline_to_mask(line, x, y):\n mpath = mplp.Path(line)\n X, Y = np.meshgrid(x, y)\n points = np.array((X.flatten(), Y.flatten())).T\n mask = mpath.contains_points(points).reshape(X.shape)\n return mask", "def make_mask(data, pad):\n def subsequent_mask(size):\n \"\"\" helper function for creating the masks. \"\"\"\n attn_shape = (1, size, size)\n subsequent_mask = np.triu(np.ones(attn_shape), k=1).astype('uint8')\n return torch.from_numpy(subsequent_mask) == 0\n\n mask = (data != pad).unsqueeze(-2)\n mask = mask & Variable(\n subsequent_mask(data.size(-1)).type_as(mask.data))\n return mask", "def SuppressEdgeSet(self, *args):\n return _BRepAlgo.BRepAlgo_DSAccess_SuppressEdgeSet(self, *args)", "def unsharp_mask(img, kernel_size=(5, 5), sigma=1.0, amount=1.0, threshold=0):\r\n blurred = cv2.GaussianBlur(img, kernel_size, sigma)\r\n sharpened = float(amount + 1) * img - float(amount) * blurred\r\n sharpened = np.maximum(sharpened, np.zeros(sharpened.shape))\r\n sharpened = np.minimum(sharpened, 255 * np.ones(sharpened.shape))\r\n sharpened = sharpened.round().astype(np.uint8)\r\n if threshold > 0:\r\n low_contrast_mask = np.absolute(img - blurred) < threshold\r\n np.copyto(sharpened, img, where=low_contrast_mask)\r\n return sharpened", "def get_mask(self):\n\t\treturn pygame.mask.from_surface(self.img)", "def mask_stump(img, mask=None, mask_only=True):\n if img.ndim < 3:\n print('better to pass the raw image')\n channel = img\n else:\n channel = img[...,0]\n\n C = channel.copy()\n\n if mask is not None:\n C[mask] = 0\n elif ma.is_masked(img):\n C[img.mask] = 0\n mask = img.mask\n else:\n mask = np.zeros(img.shape, np.bool)\n\n thresh = (170/255)*C.max()\n b = ndi.white_tophat(C > thresh, 90)\n b = remove_small_objects(b, 1000)\n #b = convex_hull_object(b)\n b[mask] = 0\n\n # sort by size of object (largest first)\n\n# incl_count = 0 #objects used\n# mags = sorted(list(range(1,n_labs+1)), key=lambda lb: np.sum(labs==lb),\n# reverse=True)\n# print(mags)\n# for l in mags:\n# # big things get weird\n# if np.sum(b==l) > 5000:\n# print('skipping very large object')\n# # get rid of it, whatever\n# plt.imshow(b==l)\n# b[b==l] = 0\n# else:\n# incl_count += 1\n#\n# if incl_count > 4:\n# print('only removing a few things here')\n# b[b==l] = 0\n#\n# print('b after')\n# plt.imshow(b)\n# b = ndi.binary_dilation(b, disk(15))\n#\n if mask_only:\n return b\n\n else:\n # will add to existing mask\n return ma.masked_array(img, mask=b)", "def getCrossFormedGraphConstraintsPreventAnySwitch(self):\n makeLayer = self.makeLayer\n addNodeToLayer = self.addNodeToLayer\n eastWestEdgeFromTo = self.eastWestEdgeFromTo\n graph = self.graph\n setInLayerOrderConstraint = self.setInLayerOrderConstraint\n\n leftLayer = makeLayer(graph)\n rightLayer = makeLayer(graph)\n\n topLeft = addNodeToLayer(leftLayer)\n bottomLeft = addNodeToLayer(leftLayer)\n topRight = addNodeToLayer(rightLayer)\n bottomRight = addNodeToLayer(rightLayer)\n\n eastWestEdgeFromTo(topLeft, bottomRight)\n eastWestEdgeFromTo(bottomLeft, topRight)\n setInLayerOrderConstraint(topRight, bottomRight)\n setInLayerOrderConstraint(topLeft, bottomLeft)\n\n return graph" ]
[ "0.6540822", "0.6309648", "0.6181867", "0.59951514", "0.5955324", "0.5927942", "0.5888009", "0.5813421", "0.57741016", "0.5771686", "0.57375604", "0.57116854", "0.57070524", "0.570407", "0.57027954", "0.5653326", "0.56071967", "0.5605403", "0.55971795", "0.5577067", "0.5569969", "0.5561385", "0.5557007", "0.55552393", "0.55505466", "0.554305", "0.55306596", "0.55254203", "0.5482649", "0.54690164", "0.5463946", "0.5456474", "0.54510814", "0.5440067", "0.5439023", "0.543475", "0.54330826", "0.5425525", "0.54128325", "0.5396745", "0.537687", "0.5356361", "0.53354234", "0.5308943", "0.5292071", "0.52771133", "0.52716273", "0.5266219", "0.52623636", "0.5252317", "0.52480614", "0.52228224", "0.5222199", "0.5204081", "0.5196881", "0.5188078", "0.51767886", "0.51756674", "0.5163487", "0.51625514", "0.51588184", "0.515425", "0.5148723", "0.5142061", "0.5141871", "0.5128011", "0.51193035", "0.51151603", "0.51114917", "0.50983274", "0.50971", "0.5095984", "0.50892687", "0.50823134", "0.5080296", "0.5075198", "0.5070381", "0.5070377", "0.5069048", "0.5069048", "0.5069048", "0.5069048", "0.5068434", "0.50671303", "0.50638324", "0.5060844", "0.5059115", "0.50559527", "0.50477123", "0.50468487", "0.50400025", "0.5028762", "0.50207007", "0.50181514", "0.50173175", "0.5012222", "0.5011914", "0.5011197", "0.50110495", "0.5010222" ]
0.7204979
0
Given a node, visits the node using `visitor`. If removal is attempted by the visitor, an exception is raised.
def visit_required(
    parent: "CSTNode", fieldname: str, node: CSTNodeT, visitor: "CSTVisitorT"
) -> CSTNodeT:
    visitor.on_visit_attribute(parent, fieldname)
    result = node.visit(visitor)
    # The node sits in a required field: the visitor may transform it, but it
    # may not remove it or replace it with a sequence of nodes.
    if isinstance(result, RemovalSentinel):
        raise TypeError(
            f"We got a RemovalSentinel while visiting a {type(node).__name__}. This "
            + "node's parent does not allow it to be removed."
        )
    elif isinstance(result, FlattenSentinel):
        raise TypeError(
            f"We got a FlattenSentinel while visiting a {type(node).__name__}. This "
            + "node's parent does not allow it to be replaced with a sequence."
        )
    visitor.on_leave_attribute(parent, fieldname)
    return result
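The names above (CSTNode, RemovalSentinel, FlattenSentinel) match LibCST's internals; assuming that context, the following minimal sketch shows how a removal attempt on a required child surfaces as the TypeError raised by visit_required. The DropName transformer and the snippet are illustrative only, not part of this dataset row:

import libcst as cst

class DropName(cst.CSTTransformer):
    # Returning REMOVE from a leave_* method is a removal attempt.
    def leave_Name(self, original_node, updated_node):
        return cst.RemovalSentinel.REMOVE

tree = cst.parse_expression("x + 1")  # BinaryOperation: its `left` field is required
try:
    tree.visit(DropName())
except TypeError as err:
    print(err)  # removal of a node in a required field is rejected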
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def removeNode(self, node):", "def depart(visitor: DocxTranslator, node: Node):\n assert isinstance(visitor, DocxTranslator)\n assert isinstance(node, Node)", "def depart(visitor: DocxTranslator, node: Node):\n assert isinstance(visitor, DocxTranslator)\n assert isinstance(node, Node)\n\n visitor.p = None", "def _del(self, handle=\"\", node=\"\", edge=\"\", subg=\"\"):\n head, tail = '', ''\n if edge:\n head, tail = edge\n\n node, head, tail, subg = map(encode_page, [node, head, tail, subg])\n\n self.changed = 1\n if head and tail:\n item = gv.findedge(gv.findnode(handle, head),\n gv.findnode(handle, tail))\n elif node:\n item = gv.findnode(handle, node)\n elif subg:\n item = gv.findsubg(handle, subg)\n elif handle:\n item = handle\n else:\n raise ValueError(\"No graph element or element type specified\")\n if item:\n gv.rm(item)", "def del_node (self, id):\n raise NotImplementedError", "def delete_node(ugraph, node):\n neighbors = ugraph[node]\n ugraph.pop(node)\n for neighbor in neighbors:\n ugraph[neighbor].remove(node)", "def delete_node(ugraph, node):\n neighbors = ugraph[node]\n ugraph.pop(node)\n for neighbor in neighbors:\n ugraph[neighbor].remove(node)", "def delete_node(ugraph, node):\n neighbors = ugraph[node]\n ugraph.pop(node)\n for neighbor in neighbors:\n ugraph[neighbor].remove(node)", "def delete_node(ugraph, node):\n neighbors = ugraph[node]\n ugraph.pop(node)\n for neighbor in neighbors:\n ugraph[neighbor].remove(node)", "def delete_node(ugraph, node):\n neighbors = ugraph[node]\n ugraph.pop(node)\n for neighbor in neighbors:\n ugraph[neighbor].remove(node)", "def delete_node(ugraph, node):\r\n neighbors = ugraph[node]\r\n ugraph.pop(node)\r\n for neighbor in neighbors:\r\n ugraph[neighbor].remove(node)", "def remove_node(self, node: str) -> None:\n self.graph.remove_node(node)", "def remove_node(self, node):\n # if the node is a part of the graph\n if node.get_name() in self.get_node_names():\n for edge in node.get_incident_edges(): # for every edge incident to the input node\n other_node = edge.get_other_node(node.get_name()) # get the other incident node object\n if other_node.get_name() in self.get_node_names(): # if the other node is a part of the graph\n self.remove_edge(tuple((node, other_node))) # remove the edge\n self.set_nodeset(\n set({\n vertex\n for vertex in self.get_nodeset()\n if not vertex.get_name().__eq__(node.get_name())\n })\n ) # remove the node from the graph's nodeset", "def removeNode(self, node: Node):\n if node in self.nodes:\n self.nodes.remove(node)\n else:\n print('!W', 'Scene:removeNode', 'wanna remove edge', node, 'from self.nodes but it is not in the list!')", "def _clean_graph_visit(self, node, visited):\n visited[node] = True\n\n while True:\n rp_node = None\n rp_id = -1\n for n_id, n in enumerate(node.get_children()):\n if n.get_type() == CFGNodeType.END_IF:\n rp_node = n\n rp_id = n_id\n break\n\n # end node points to only one child,\n # so replace it\n if rp_node is not None and rp_node.get_children() != []:\n node.get_children()[rp_id] = rp_node.get_children()[0]\n\n # END-IF can be replaced by another, so continue until there's none\n if rp_node == None:\n break\n\n if node.get_type() == CFGNodeType.PSEUDO:\n self._clean_graph_visit(node.get_refnode(), visited)\n\n for child in node.get_children():\n if child not in visited:\n self._clean_graph_visit(child, visited)", "def depart(visitor: DocxTranslator, node: Node):\n assert isinstance(visitor, DocxTranslator)\n assert isinstance(node, Node)\n\n visitor.p = None\n 
visitor.p_parents.pop()\n visitor.tables[-1][2] += 1", "def remove_node(self, node):\n self.nodes.remove(node)\n node.close()", "def _delete_node(self, node):\n predecessor = node._prev\n successor = node._next\n predecessor._next = successor\n successor._prev = predecessor\n self._size -= 1\n element = node._element # record deleted element\n node._prev = node._next = node._element = None # deprecate node\n return element # return deleted element", "def depart(visitor: DocxTranslator, node: Node):\n assert isinstance(visitor, DocxTranslator)\n assert isinstance(node, Node)\n\n visitor.r = None\n visitor.r_style = None", "def generic_visit(self, node):\n raise Exception('No visit_{} method'.format(type(node).__name__))", "def delete_node(self, n):\n\n if n not in self.node:\n raise PathGraphException(\"The node {} is not in the graph.\".format(n))\n\n self.delete_node_from_path(n)\n self.delete_path_containing_node(n)\n del self.node[n]", "def delete_node(self, node: 'GraphNode'):\n\n self.operator.delete_node(node)", "def remove_node(self, node_key: NodeKey) -> Node:", "def remove_node(self, node):\n self.nodes[node.name] = node\n self.dirty = True", "def delete_node(self, node):\n return node.delete()", "def _delete_node(self, node):\n predecessor = node._prev\n successor = node._next\n predecessor._next = successor\n successor._prev = predecessor\n self._size -= 1\n element = node._element # record deleted element\n node._prev = node._next = node._element = None # deprecate node\n return element # return deleted element", "def remove(self, node):\n parent = node.getparent()\n if parent is None:\n return\n parent.remove(node)", "def _un_onnode(visitor, node, namespace):\n namespace.refresh(node._qname, node)\n visitor.mapacc(node._children, node._namespace)\n return node, namespace", "def del_node (self, node):\n try:\n if isinstance(node, Node):\n node = node.id\n elif isinstance(node, Port):\n node = node.node.id\n self.network.remove_node(node)\n return True\n except NetworkXError:\n # There was no node in the graph\n return False", "def remove(self, node):\r\n\r\n for n, conns in self._graph.items(): # python3: items(); python2: iteritems()\r\n try:\r\n conns.remove(node)\r\n except KeyError:\r\n pass\r\n try:\r\n del self._graph[node]\r\n except KeyError:\r\n pass", "def remove_node(self, id):\r\n\t\tif id in self._nodes:\r\n\t\t\tnode = self._nodes[id]\r\n\t\t\tedges = node.edges()\r\n\t\t\t# ugly can maybe fix it up with sets\r\n\t\t\tfor edge in edges:\r\n\t\t\t\tlabel = edge.label\r\n\t\t\t\tdel edge.start_node._edges[label]\r\n\t\t\t\tdel edge.end_node._edges[label]\r\n\t\t\t\tdel self._edges[edge.id]\r\n\t\t\tdel self._nodes[id]\r\n\t\telse:\r\n\t\t\t# return a real exception someday\r\n\t\t\tprint('Error: Cannot remove node since id does not exist')", "def _remove_node(self, node):\n prev = node.prev\n new = node.next\n\n prev.next = new\n new.prev = prev", "def _remove_node(self, node):\n previous = node.prev\n next_node = node.next\n\n previous.next = next_node\n next_node.prev = previous", "def del_node(node, delnum):\n pass", "def delete_node(self, node):\n return self.manager.delete_node(self, node)", "def ignore_visit_interslide(self, node):\n raise SkipNode", "def invisible_visit(self, node: Node) -> None:\n pass", "def remove_node(self, node):\n self.nodes.pop(self.nodes.index(node))\n node1 = node.neighbour1\n node2 = node.neighbour2\n node1.neighbour2 = node2\n node2.neighbour1 = node1", "def remove(self, node):\n curr, prev = self.find(node, inc_prev=True)\n if 
curr:\n self._remove(curr, prev)", "def remove_edges(self, node: NodeKey) -> Edge:", "def remove_node(self, key) -> Node:", "def remove( self, node ):\n parent_node = self\n if parent_node:\n try:\n pos = parent_node.children.index( node )\n del parent_node.children[ pos ]\n if node.previousSiblingNode and node.nextSiblingNode:\n node.previousSiblingNode.nextSiblingNode = node.nextSiblingNode\n node.nextSiblingNode.previousSiblingNode = node.previousSiblingNode\n except ValueError:\n raise Exception( str( node ) + \": node is not a children of the parent node\" )\n \n return node", "def remove_node(self, node):\n try:\n self._nodes.remove(node)\n del self._weights[node]\n except (KeyError, ValueError):\n pass\n self._rebuild_circle()", "def _delete_node(self, node):\n predecessor = node._previous\n successor = node._next\n predecessor._next = successor\n successor._previous = predecessor\n self._size -= 1\n element = node._element # store deleted node element for returning later\n node._previous = node._next = node._element = None # helps garbage collection\n return element", "def unknown_visit(self, node):\n pass", "def del_node(self, n):\n if n in self.node_dict:\n del self.node_dict[n]\n for node in self.node_dict:\n try:\n self.del_edge(node, n)\n except:\n pass\n else:\n raise KeyError(\"Cannot remove node that does not exist.\")", "def generic_visit(self, node):\n\n visit_method_name = 'visit_' + node.__class__.__name__\n if hasattr(self, visit_method_name):\n method = getattr(self, visit_method_name)\n method(node)\n\n return node", "def remove_node_from_graph(ugraph, node):\n neighbors = ugraph[node]\n for neighbor in neighbors:\n ugraph[neighbor].remove(node)\n ugraph.pop(node)\n return ugraph", "def mutate(cls, node):\n if node not in config.visited_nodes:\n if node.__class__ in [ast.Raise, ast.Assign, ast.AugAssign, ast.Call, ast.Expr] and node in config.nodes_to_remove:\n config.mutated = True\n original_node = deepcopy(node)\n parent = config.parent_dict[node]\n del config.parent_dict[node]\n config.nodes_to_remove.remove(node)\n node = ast.Pass()\n config.parent_dict[node] = parent\n config.node_pairs[node] = original_node\n config.current_mutated_node = node\n\n return node", "def delete_node(self, name):\n\n name = self._validate_name(name)\n if name in self.nodes:\n del self.nodes[name]", "def remove_node(self, node):\n\t\tnode.close()\n\t\taddress = (node.server_ip, node.server_port)\n\t\tself.nodes.pop(address)", "def del_node(self, n):\n try:\n del self.dict[n]\n # remove edges pointing to n\n for key, value in self.dict.iteritems():\n if n in value:\n del self.dict[key][n]\n except (ValueError, KeyError):\n raise AttributeError('No Such Node Exists')", "def remove_node(self, n):\r\n keys = self.d.keys()\r\n #check for node in graph\r\n if n not in keys:\r\n raise KeyError(str(n) + \" is not in graph\")\r\n self.d.pop(n)\r\n #discard each occurence of node in the values of others\r\n for k in keys:\r\n edges = self.d[k]\r\n new = edges.discard(n)", "def remove(self,node,verbose=False):\n for label,parent in node.incoming:\n parent.outgoing.remove((label,node))\n for label,child in node.outgoing:\n child.incoming.remove((label,node))\n self.pop(node.nodeid)\n for x,y in copy(self.undirected):\n if x == node or y == node:\n self.undirected.remove((x,y))\n if self.root == node:\n self.root = None\n if verbose: print('removed',node)", "def erase_node(node: Node):\n graph = node.graph\n node_id = node.id\n\n inputs = list(graph.in_edges(node_id, data=True))\n outputs = 
list(graph.out_edges(node_id, data=True))\n\n assert not (len(inputs) > 1 and len(outputs) > 1)\n\n if len(outputs) == 0 and len(inputs) != 0:\n for input, _, attrs in inputs:\n if Node(graph, node_id).has_and_set('is_output'):\n if graph.node[input]['kind'] == 'op':\n data_nodes = [u for u, v in graph.in_edges(input)]\n for data in data_nodes:\n graph.node[data]['is_output'] = graph.node[node_id]['is_output']\n else:\n graph.node[input]['is_output'] = graph.node[node_id]['is_output']\n\n if len(outputs) == 0 or len(inputs) == 0:\n graph.remove_node(node_id)\n return\n\n if len(outputs) == 1:\n output = outputs[0][1]\n for src, noop, attrs in inputs:\n graph.remove_edge(src, noop)\n graph.add_edge(src, output, **attrs)\n graph.remove_node(node_id)\n return\n\n if len(inputs) == 1:\n input = inputs[0][0]\n for noop, dst, attrs in outputs:\n graph.remove_edge(noop, dst)\n graph.add_edge(input, dst, **attrs)\n graph.remove_node(node_id)\n return", "def remove_node(self, node_address):\n node = self.find_node(node_address[0], node_address[1])\n if node is None:\n raise ValueError\n else:\n for child_node in node.get_subtree():\n child_node.set_dead()\n\n node.remove_from_parent()\n node.parent = None\n node.set_dead()\n # self.nodes.remove(node)\n pass", "def visit_Node(self, node):\n pass", "def get_visitor(self, node):\r\n method = 'visit_' + node.__class__.__name__\r\n return getattr(self, method, None)", "def remove_node(self, node):\n affected_nodes = [v for u, v in self.edges() if u == node]\n\n for affected_node in affected_nodes:\n node_cpd = self.get_cpds(node=affected_node)\n if node_cpd:\n node_cpd.marginalize([node], inplace=True)\n\n if self.get_cpds(node=node):\n self.remove_cpds(node)\n super(BayesianModel, self).remove_node(node)", "def _delete_node(self, node):\n\n if self.is_empty():\n raise Empty(\"List is empty!\")\n\n predecessor = node._prev\n successor = node._next\n\n predecessor._next = successor\n successor._prev = predecessor\n\n elem = node._element\n node._prev = node._next = node._element = None\n\n self._size -= 1\n\n return elem", "def remove_from_node(node, child_to_remove):\n\n children_names = list(History.children_names(node))\n try:\n child_to_remove_index = children_names.index(child_to_remove)\n result = History.children(History.children(node)[child_to_remove_index]), True\n del History.children(node)[child_to_remove_index]\n return result\n except ValueError:\n children_call_result = [History.remove_from_node(child_node, child_to_remove)\n for child_node in History.children(node)]\n return reduce(lambda l, r: l if snd(l) else r,\n children_call_result, ([], False))", "def delete(node):\n try:\n if os.path.isdir(node):\n shutil.rmtree(node)\n else:\n os.unlink(node)\n except OSError as error:\n if error.errno not in [errno.ENOENT, errno.EPERM, errno.EACCES]:\n raise error", "def test_graph_cant_delete_an_unpresent_node(graph_no_edges):\n with pytest.raises(ValueError):\n graph_no_edges.del_nodes(3.14)", "def prune_node(self, node, remove_backrefs=False):\n self.nodes = [x for x in self.nodes if x != node]\n if node in self.edges:\n # Remove add edges from this node if we're pruning it.\n self.edges.pop(node)\n\n for fro, connections in self.edges.items():\n # Remove any links to this node (if they exist)\n if node in self.edges[fro]:\n if remove_backrefs:\n # If we should remove backrefs:\n self.edges[fro].remove(node)\n else:\n # Let's raise an Exception\n raise ValueError(\"\"\"Attempting to remove a node with\n backrefs. 
You may consider setting\n `remove_backrefs` to true.\"\"\")", "def removeNode(cTag, nTag): #@NoSelf", "def visit(self, node):", "def visit(self, node):", "def visit(self, node):\n method_name = 'visit_' + type(node).__name__\n visit_method = getattr(self, method_name, self.generic_visit)\n return visit_method(node)", "def del_node(self, node_id):\n assert(node_id is not None)\n LOG.info(\"Try to del node=%s\" % node_id)\n\n try:\n enet = EnhNetNode(node_id)\n self.info.nodeDel(enet.ident)\n LOG.debug(\"Successfully deleted node: %s\", str(enet))\n\n except TOPOLOGY.CannotFetchNode, exe:\n LOG.error(\"CannotFetchNode exception: %s\", str(exe))\n except TOPOLOGY.InternalProblems, exe:\n LOG.error(\"InternalProblems exception: %s\", str(exe))\n except TOPOLOGY.InvocationNotAllowed, exe:\n LOG.error(\"InvocationNotAllowed exception: %s\", str(exe))\n except Exception, exe:\n LOG.error(\"Generic exception: %s\", str(exe))", "def remove(self, node):\n if type(node) is Node:\n prev = None\n curr = self.head\n while curr:\n if curr is node:\n if prev:\n prev.next = curr.next\n else:\n self.head = curr.next\n self._length -= 1\n break\n prev = curr\n curr = curr.next\n else:\n raise ValueError(\"Cannot remove node not in list.\")\n else:\n raise ValueError(\"Argument to remove must be of node type.\")", "def _internalRemove(self, node):\n\t\tif not node:\n\t\t\treturn None\n\n\t\t#Case 1 - node is a leaf\n\t\tif (not node.lchild() and not node.rchild()):\n\t\t\tprint str(node.value()) + \": 1\"\n\t\t\tif (node is self._root):\n\t\t\t\tself._root = None\n\t\t\t\treturn node.value()\n\t\t\tif node.islchild():\n\t\t\t\tnode.parent().setlchild()\n\t\t\telse:\n\t\t\t\tnode.parent().setrchild()\n\t\t\treturn node.value()\n\n\t\t#Case 2 - node has only 1 child\n\t\tif (bool(node.lchild()) != bool(node.rchild())): #basically an XOR\n\t\t\tprint str(node.value()) + \": 2\"\n\t\t\tif node.lchild():\n\t\t\t\tif (node is self._root):\n\t\t\t\t\tself._root = node.lchild()\n\t\t\t\t\treturn node.value()\n\t\t\t\telse:\n\t\t\t\t\tif node.islchild():\n\t\t\t\t\t\tnode.parent().setlchild(node.lchild())\n\t\t\t\t\telse:\n\t\t\t\t\t\tnode.parent().setrchild(node.lchild())\n\t\t\t\t\treturn node.value()\n\t\t\telse:\n\t\t\t\tif (node is self._root):\n\t\t\t\t\tself._root = node.rchild()\n\t\t\t\t\treturn node.value()\n\t\t\t\telse:\n\t\t\t\t\tif node.islchild():\n\t\t\t\t\t\tnode.parent().setlchild(node.rchild())\n\t\t\t\t\telse:\n\t\t\t\t\t\tnode.parent().setrchild(node.rchild())\n\t\t\t\t\treturn node.value()\n\n\t\t#case 3 - node has 2 children\n\t\t#find minimum element in right subtree, switch data\n\t\t#delete the node that had the minimum element\n\t\tif (node.lchild() and node.rchild()):\n\t\t\tprint str(node.value()) + \": 3\"\n\t\t\tminele = node.rchild()\n\t\t\twhile minele.lchild():\n\t\t\t\tminele = minele.lchild()\n\t\t\ttemp = node.value()\n\t\t\tnode.setvalue(minele.value())\n\t\t\tminele.setvalue(temp)\n\t\t\tself._internalRemove(minele)\n\t\t\treturn node.value()", "def remove_node():\n\ttry:\n\t\tnetwork.remove_connection()\n\texcept ValueError as err:\n\t\tfeedback.config(text=err)", "def delete_node(self, loadbalancer, node):\n lb = node.parent\n if not lb:\n raise exc.UnattachedNode(\"No parent Load Balancer for this node \"\n \"could be determined.\")\n resp, body = self.api.method_delete(\"/loadbalancers/%s/nodes/%s\" %\n (lb.id, node.id))\n return resp, body", "def visit(self, node):\n name = 'visit_%s' % node.__class__.__name__\n try:\n method = getattr(self, name)\n except AttributeError:\n 
method = self.default_visit\n method(node)", "def visit(self, node):\n name = 'visit_%s' % node.__class__.__name__\n try:\n method = getattr(self, name)\n except AttributeError:\n method = self.default_visit\n method(node)", "def delete_node(name: str, value: str) -> None:\n global _graph\n\n if _graph is None:\n print('\\ndelete_node(): Error: graph has not been initialized or opened.\\n\\n')\n return\n\n lname = str(name)\n lvalue = str(value)\n\n if lname == '' or lvalue == '' or lname == 'nan' or lvalue == 'nan':\n return\n\n node = read_node(name=lname, value=lvalue)\n if node is None:\n return\n\n _graph.delete(node)\n return", "def depart_solution_node_(self, node):\n raise NotImplemented\n self.depart_admonition(node)", "def isolate_node(self, node_id: int,\n effectiveness: float = 0.95) -> None:\n node = self.g_.nodes[node_id]\n node[\"isolated\"] = True\n\n # Select edges to remove\n to_remove = []\n for uv in self.g_.edges(node_id):\n if self._random_state.binomial(1, effectiveness):\n to_remove.append(uv)\n\n # Do NOT deepcopy EdgeView!!! Copy won't work either.\n node[\"_edges\"] += to_remove\n\n self.g_.remove_edges_from(to_remove)", "def remove_node(self,node):\n if node.left.node.right: #Node has two children.\n #Find its in order successor\n successor = node.right\n while successor.left:\n successor = successor.left\n #copy the node\n node.key = successor.key\n node.value = successor.value\n #remove the successor\n self.remove_node(successor)\n elif node.left: #The node only has a left child.\n self.replace_node(node,node.left)\n elif node.right: #The node only has a right child.\n self.replace_node(node,node.right)\n else:\n self.replace_node(node, None)", "def delNode(nodeName):\n\t\tslicer.util.getNode(nodeName)\n\t\tslicer.mrmlScene.RemoveNode(slicer.util.getNode(nodeName))\n\t\treturn", "def delete_node(self, node):\n curr = self.head\n while curr.next is not None:\n if curr.next == node:\n break\n curr = curr.next\n curr.next = node.next\n node = None\n return", "def del_node_from_string(self, node):\n assert(node is not None)\n LOG.info(\"Try to del node=%s\" % node)\n\n try:\n net = NetNode(node)\n self.info.nodeDel(net.ident)\n LOG.debug(\"Successfully deleted node: %s\", str(net))\n\n except TOPOLOGY.CannotFetchNode, exe:\n LOG.error(\"CannotFetchNode exception: %s\", str(exe))\n except TOPOLOGY.InternalProblems, exe:\n LOG.error(\"InternalProblems exception: %s\", str(exe))\n except TOPOLOGY.InvocationNotAllowed, exe:\n LOG.error(\"InvocationNotAllowed exception: %s\", str(exe))\n except Exception, exe:\n LOG.error(\"Generic exception: %s\", str(exe))", "def deletenode(self, node_p=None):\n node_p = self.getnodenamed(node_p) # Verify pointer.\n # (node_bn* node)\n cnetica.DeleteNode_bn.argtypes = [c_void_p]\n cnetica.DeleteNode_bn.restype = None\n cnetica.DeleteNode_bn(node_p)", "def remove_node(self, node_id: int) -> bool:\r\n # check if key exists in dictionary by checking if get() returned default value\r\n if len(self.Edges[node_id])!=0:\r\n self.edgeSize -=len(self.Edges[node_id])\r\n del self.Edges[node_id]\r\n del self.Edges_In[node_id]\r\n #should do iteration on every node and check if this node_id is also neighbor there\r\n for key, value in self.Edges.items():\r\n if node_id in value:\r\n self.edgeSize -= 1\r\n del value[node_id]\r\n del self.Nodes[node_id]\r\n self.mc +=1\r\n return True\r\n raise NotImplementedError", "def remove_node(self, node):\n if node.running():\n raise RuntimeError(\"Refusing to remove Node while it is running.\")\n 
self.nodes.remove(node)", "def del_edge (self, src, dst):\n raise NotImplementedError", "def remove_peer_node(self, node):\n self.__peer_nodes.discard(node)\n self.save_data()", "def remove_peer_node(self, node):\n self.__peer_nodes.discard(node)\n self.save_data()", "def generic_visit(self, node: ast.AST) -> None:", "def remove(self, node, remove_edges_only=False):\n if not remove_edges_only:\n for related_edges in self._graph.values():\n try:\n related_edges.pop(node)\n except KeyError:\n pass\n try:\n del self._graph[node]\n except KeyError:\n pass", "def node_remove(self, node, update_statistics_ancestors_depth=None):\n\n if self.node_count_children(node):\n return False\n\n mtime = time()\n q = (\"select count(serial), sum(size), cluster \"\n \"from versions \"\n \"where node = ? \"\n \"group by cluster\")\n self.execute(q, (node,))\n for population, size, cluster in self.fetchall():\n self.statistics_update_ancestors(\n node, -population, -size, mtime, cluster,\n update_statistics_ancestors_depth)\n\n q = \"delete from nodes where node = ?\"\n self.execute(q, (node,))\n return True", "def delete_node_from_path(self, n):\n if n not in self.node:\n raise PathGraphException(\"The node {} is not in the graph.\".format(n))\n\n if n not in self.path_id:\n return\n path_id = self.path_id[n]\n\n if len(self.path[path_id]) == 1:\n assert self.adj[n] == {}\n del self.path[n]\n return\n\n idx_n = self.path[path_id].index(n)\n new_path_left = self.path[path_id][:idx_n]\n new_path_right = self.path[path_id][idx_n+1:]\n\n # delete original path\n self.delete_path_containing_node(n)\n if not new_path_left:\n self.add_path(new_path_right, name=path_id)\n elif not new_path_right:\n self.add_path(new_path_left, name=path_id)\n else:\n new_name_left, new_name_right = PathGraph.new_split_path_names(path_id)\n self.add_path(new_path_left, name=new_name_left)\n self.add_path(new_path_right, name=new_name_right)\n del self.adj[n]", "def delete_node(self, node):\r\n\r\n # if node is loose LEAF, just delete the node and tell its parent its child is gone\r\n if not node.rightchild and not node.leftchild:\r\n if node == node.parent.rightchild:\r\n node.parent.rightchild = None\r\n if node == node.parent.leftchild:\r\n node.parent.leftchild = None\r\n # if node has ONE CHILD, being left: just delete the node and tell its parent is node's left child\r\n if not node.rightchild and node.leftchild:\r\n if node == node.parent.rightchild:\r\n node.parent.rightchild = node.leftchild\r\n if node == node.parent.leftchild:\r\n node.parent.leftchild = node.leftchild\r\n # if node has ONE CHILD, being right: just delete the node and tell its parent is node's right child\r\n if node.rightchild and not node.leftchild:\r\n if node == node.parent.rightchild:\r\n node.parent.rightchild = node.rightchild\r\n if node == node.parent.leftchild:\r\n node.parent.leftchild = node.rightchild\r\n # if node has TWO CHILDREN: swap node with the one containing the inorder successor, then solve the problem from\r\n # there by trying to delete that node (which is a recursive call)\r\n if node.rightchild and node.leftchild:\r\n swapnode = self.get_inorder_successor_from_right_part_tree(node.rightchild)\r\n temp = node.object\r\n node.object = swapnode.object\r\n swapnode.object = temp\r\n self.delete_node(swapnode)", "def delete_node(self,n):\n if self._node_to_edges is not None:\n if len(self._node_to_edges[n])>0:\n print( \"Node %d has edges: %s\"%(n,self._node_to_edges[n]) )\n raise GridException(\"Node still has edges referring to it\")\n 
del self._node_to_edges[n]\n if self._node_to_cells is not None:\n if len(self._node_to_cells[n])>0:\n raise GridException(\"Node still has cells referring to it\")\n del self._node_to_cells[n]\n if self._node_index is not None:\n self._node_index.delete(n, self.nodes['x'][n,self.xxyy] )\n\n self.push_op(self.undelete_node,n,self.nodes[n].copy())\n\n self.nodes['deleted'][n] = True\n \n # special case, used for undo, reverts to previous state\n # more completely.\n if len(self.nodes)==n+1:\n self.nodes=self.nodes[:-1]", "def removeNode(self, index):\n del self.nodes[index]", "def visit(self, node: AstNode):\n raise CASTTypeError(f\"Unrecognized node type: {type(node)}\")", "def depart_exercise_node_(self, node):\n raise NotImplemented\n self.depart_admonition(node)", "def default_visit(self, node):\n raise ValueError('Unhandled Node %s.' % node)", "def default_visit(self, node):\n raise ValueError('Unhandled Node %s.' % node)", "def remove_node(self, node):\n \n try:\n num_of_edge = len(self.prefix[node]) + len(self.suffix[node])\n self.node_set.remove(node)\n \n # remove edge associated with the node\n for key in self.prefix[node]:\n \n self.suffix[key].pop(node)\n \n for key in self.suffix[node]:\n \n self.prefix[key].pop(node)\n \n self.prefix.pop(node)\n self.suffix.pop(node)\n \n self.num_node -= 1\n self.edges -= num_of_edge\n \n except:\n print(\"ERROR: No node found.\")", "def accept(visitor):" ]
[ "0.66534734", "0.6408771", "0.6175824", "0.61187774", "0.60823214", "0.60546935", "0.60546935", "0.60546935", "0.60546935", "0.60546935", "0.6018935", "0.6014195", "0.6013895", "0.59864545", "0.59284323", "0.59193057", "0.5896044", "0.589552", "0.58731836", "0.5853483", "0.5843655", "0.58229464", "0.5785259", "0.57741636", "0.57630396", "0.576024", "0.57005966", "0.56995887", "0.56758964", "0.567217", "0.56622183", "0.56427395", "0.5625126", "0.56223947", "0.5617088", "0.5606008", "0.55536634", "0.55513614", "0.55393517", "0.5513833", "0.54942185", "0.5472184", "0.5449905", "0.54416364", "0.5429377", "0.54221594", "0.54064685", "0.54015803", "0.5399624", "0.5393035", "0.53918", "0.5383281", "0.538218", "0.537601", "0.5372412", "0.5367928", "0.53543204", "0.5341882", "0.5326038", "0.5282282", "0.52757347", "0.52696836", "0.5262295", "0.5259142", "0.5245202", "0.5239712", "0.5239712", "0.5237835", "0.52120066", "0.521135", "0.51975936", "0.5187155", "0.5184404", "0.5177018", "0.5177018", "0.5166066", "0.5164493", "0.51465553", "0.5131511", "0.51308924", "0.51261526", "0.5125054", "0.5121657", "0.5120046", "0.5114016", "0.5106783", "0.51050943", "0.51050943", "0.5101882", "0.50996256", "0.5093736", "0.5091568", "0.50849426", "0.5081451", "0.50814325", "0.5069193", "0.5048525", "0.5039363", "0.5039363", "0.50295407", "0.50187284" ]
0.0
-1
Given an optional node, visits the node with `visitor` if it exists. If the node is removed, returns None.
def visit_optional(
    parent: "CSTNode", fieldname: str, node: Optional[CSTNodeT], visitor: "CSTVisitorT"
) -> Optional[CSTNodeT]:
    if node is None:
        visitor.on_visit_attribute(parent, fieldname)
        visitor.on_leave_attribute(parent, fieldname)
        return None
    visitor.on_visit_attribute(parent, fieldname)
    result = node.visit(visitor)
    if isinstance(result, FlattenSentinel):
        raise TypeError(
            f"We got a FlattenSentinel while visiting a {type(node).__name__}. This "
            + "node's parent does not allow for it to be replaced with a sequence."
        )
    visitor.on_leave_attribute(parent, fieldname)
    return None if isinstance(result, RemovalSentinel) else result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def depart(visitor: DocxTranslator, node: Node):\n assert isinstance(visitor, DocxTranslator)\n assert isinstance(node, Node)\n\n visitor.p = None", "def get_visitor(self, node: Node) -> \"t.Optional[VisitCallable]\":\n return getattr(self, f\"visit_{type(node).__name__}\", None)", "def or_none(cls, node):\n return node if isinstance(node, cls) else None", "def depart(visitor: DocxTranslator, node: Node):\n assert isinstance(visitor, DocxTranslator)\n assert isinstance(node, Node)", "def removeNode(self, node):", "def getOptionalNode(node, name, option=None):\n try:\n return getNode(node, name)\n except NodeError:\n return option", "def remove_node(self, node_key: NodeKey) -> Node:", "def delete_node(head, nodetodelete):\n\n #Nothing to do if the head is None or the nodetodelete is None\n if not head and not nodetodelete:\n print 'Nothing to delete. No arguments passed in'\n return\n\n #is the nodetodelete the head node, deal with that.\n node=head\n\n while(node.next):\n if(node.next == nodetodelete):\n if(node.next.next == None):\n node.next = None\n else:\n node.next = node.next.next\n\n #After finding the relevant nodetodelete, break out of the loop\n break\n node = node.next", "def depart(visitor: DocxTranslator, node: Node):\n assert isinstance(visitor, DocxTranslator)\n assert isinstance(node, Node)\n\n visitor.r = None\n visitor.r_style = None", "def find(node, url):\r\n if node.location.to_deprecated_string() == url:\r\n return node\r\n for child in node.get_children():\r\n found = find(child, url)\r\n if found:\r\n return found\r\n return None", "def getXpathOptionalNode(elem, xpath):\n valueElems = elem.xpath(xpath)\n if len(valueElems) > 0:\n return valueElems[0]\n return None", "def _purgeNode(self, data):\n\t\tq = c3.Queue()\n\t\tq.enqueue(self._root)\n\t\twhile q.peek():\n\t\t\tnode = q.dequeue()._data\n\t\t\tif node.value() == data:\n\t\t\t\treturn node\n\t\t\tif node.lchild():\n\t\t\t\tq.enqueue(node.lchild())\n\t\t\tif node.rchild():\n\t\t\t\tq.enqueue(node.rchild())\n\t\treturn None", "def find_live_node(self, sender):\n if self.find_node(sender[0], sender[1]) is not None:\n return None\n node_list: list[GraphNode] = [self.root]\n while node_list:\n for node in node_list:\n if len(node.children) < 2: # is this syntax right?!\n return node\n node_list += node.children\n node_list.remove(node)\n pass", "def depart(visitor: DocxTranslator, node: Node):\n assert isinstance(visitor, DocxTranslator)\n assert isinstance(node, Node)\n\n visitor.p = None\n visitor.p_parents.pop()\n visitor.tables[-1][2] += 1", "def get_visitor(self, node):\r\n method = 'visit_' + node.__class__.__name__\r\n return getattr(self, method, None)", "def delete_node(ugraph, node):\n neighbors = ugraph[node]\n ugraph.pop(node)\n for neighbor in neighbors:\n ugraph[neighbor].remove(node)", "def delete_node(ugraph, node):\n neighbors = ugraph[node]\n ugraph.pop(node)\n for neighbor in neighbors:\n ugraph[neighbor].remove(node)", "def delete_node(ugraph, node):\n neighbors = ugraph[node]\n ugraph.pop(node)\n for neighbor in neighbors:\n ugraph[neighbor].remove(node)", "def delete_node(ugraph, node):\n neighbors = ugraph[node]\n ugraph.pop(node)\n for neighbor in neighbors:\n ugraph[neighbor].remove(node)", "def delete_node(ugraph, node):\n neighbors = ugraph[node]\n ugraph.pop(node)\n for neighbor in neighbors:\n ugraph[neighbor].remove(node)", "def removeNode(cTag, nTag): #@NoSelf", "def remove_node(self, key) -> Node:", "def delete_node(ugraph, node):\r\n neighbors = ugraph[node]\r\n 
ugraph.pop(node)\r\n for neighbor in neighbors:\r\n ugraph[neighbor].remove(node)", "def delete_node(self, node):\n return node.delete()", "def remove(self, data):\n\n traverse = self.head\n temp = self.head\n if self.head == None:\n return None\n\n if traverse.data == data:\n self.head = traverse.next\n return\n\n while traverse.next != None:\n\n temp = traverse.next\n if temp.data == data:\n traverse.next = temp.next\n return\n\n traverse = traverse.next", "def invisible_visit(self, node: Node) -> None:\n pass", "def del_node (self, id):\n raise NotImplementedError", "def remove(self, node):\n parent = node.getparent()\n if parent is None:\n return\n parent.remove(node)", "def remove_node(self, node: str) -> None:\n self.graph.remove_node(node)", "def _clean_graph_visit(self, node, visited):\n visited[node] = True\n\n while True:\n rp_node = None\n rp_id = -1\n for n_id, n in enumerate(node.get_children()):\n if n.get_type() == CFGNodeType.END_IF:\n rp_node = n\n rp_id = n_id\n break\n\n # end node points to only one child,\n # so replace it\n if rp_node is not None and rp_node.get_children() != []:\n node.get_children()[rp_id] = rp_node.get_children()[0]\n\n # END-IF can be replaced by another, so continue until there's none\n if rp_node == None:\n break\n\n if node.get_type() == CFGNodeType.PSEUDO:\n self._clean_graph_visit(node.get_refnode(), visited)\n\n for child in node.get_children():\n if child not in visited:\n self._clean_graph_visit(child, visited)", "def removeNode(node, head, tail) :\n if node is head :\n node = None\n return (None, None)\n if node is tail :\n tail = tail.prev\n tail.next = None\n node.prev = None\n return (head, tail)\n node.prev.next = node.next\n node.next.prev = node.prev\n node.next = None\n node.prev = None\n node = None\n return (head, tail)", "def _del(self, handle=\"\", node=\"\", edge=\"\", subg=\"\"):\n head, tail = '', ''\n if edge:\n head, tail = edge\n\n node, head, tail, subg = map(encode_page, [node, head, tail, subg])\n\n self.changed = 1\n if head and tail:\n item = gv.findedge(gv.findnode(handle, head),\n gv.findnode(handle, tail))\n elif node:\n item = gv.findnode(handle, node)\n elif subg:\n item = gv.findsubg(handle, subg)\n elif handle:\n item = handle\n else:\n raise ValueError(\"No graph element or element type specified\")\n if item:\n gv.rm(item)", "def _internalRemove(self, node):\n\t\tif not node:\n\t\t\treturn None\n\n\t\t#Case 1 - node is a leaf\n\t\tif (not node.lchild() and not node.rchild()):\n\t\t\tprint str(node.value()) + \": 1\"\n\t\t\tif (node is self._root):\n\t\t\t\tself._root = None\n\t\t\t\treturn node.value()\n\t\t\tif node.islchild():\n\t\t\t\tnode.parent().setlchild()\n\t\t\telse:\n\t\t\t\tnode.parent().setrchild()\n\t\t\treturn node.value()\n\n\t\t#Case 2 - node has only 1 child\n\t\tif (bool(node.lchild()) != bool(node.rchild())): #basically an XOR\n\t\t\tprint str(node.value()) + \": 2\"\n\t\t\tif node.lchild():\n\t\t\t\tif (node is self._root):\n\t\t\t\t\tself._root = node.lchild()\n\t\t\t\t\treturn node.value()\n\t\t\t\telse:\n\t\t\t\t\tif node.islchild():\n\t\t\t\t\t\tnode.parent().setlchild(node.lchild())\n\t\t\t\t\telse:\n\t\t\t\t\t\tnode.parent().setrchild(node.lchild())\n\t\t\t\t\treturn node.value()\n\t\t\telse:\n\t\t\t\tif (node is self._root):\n\t\t\t\t\tself._root = node.rchild()\n\t\t\t\t\treturn node.value()\n\t\t\t\telse:\n\t\t\t\t\tif node.islchild():\n\t\t\t\t\t\tnode.parent().setlchild(node.rchild())\n\t\t\t\t\telse:\n\t\t\t\t\t\tnode.parent().setrchild(node.rchild())\n\t\t\t\t\treturn 
node.value()\n\n\t\t#case 3 - node has 2 children\n\t\t#find minimum element in right subtree, switch data\n\t\t#delete the node that had the minimum element\n\t\tif (node.lchild() and node.rchild()):\n\t\t\tprint str(node.value()) + \": 3\"\n\t\t\tminele = node.rchild()\n\t\t\twhile minele.lchild():\n\t\t\t\tminele = minele.lchild()\n\t\t\ttemp = node.value()\n\t\t\tnode.setvalue(minele.value())\n\t\t\tminele.setvalue(temp)\n\t\t\tself._internalRemove(minele)\n\t\t\treturn node.value()", "def _un_onnode(visitor, node, namespace):\n namespace.refresh(node._qname, node)\n visitor.mapacc(node._children, node._namespace)\n return node, namespace", "def removeNode(self, node: Node):\n if node in self.nodes:\n self.nodes.remove(node)\n else:\n print('!W', 'Scene:removeNode', 'wanna remove edge', node, 'from self.nodes but it is not in the list!')", "def deletenode(self, node_p=None):\n node_p = self.getnodenamed(node_p) # Verify pointer.\n # (node_bn* node)\n cnetica.DeleteNode_bn.argtypes = [c_void_p]\n cnetica.DeleteNode_bn.restype = None\n cnetica.DeleteNode_bn(node_p)", "def _remove_node(self, node):\n prev = node.prev\n new = node.next\n\n prev.next = new\n new.prev = prev", "def _delete_node(self, node):\n predecessor = node._prev\n successor = node._next\n predecessor._next = successor\n successor._prev = predecessor\n self._size -= 1\n element = node._element # record deleted element\n node._prev = node._next = node._element = None # deprecate node\n return element # return deleted element", "def delete_node(self, node):\n curr = self.head\n while curr.next is not None:\n if curr.next == node:\n break\n curr = curr.next\n curr.next = node.next\n node = None\n return", "def delete(self, key):\r\n node_to_delete = self._find_node(key)\r\n if node_to_delete is None:\r\n return None\r\n else:\r\n # Linked list with trailer node allows deleting current node easily without checking if the node is the last one\r\n deleted_value = node_to_delete.value[1]\r\n node_to_delete.value = node_to_delete.next.value\r\n node_to_delete.next = node_to_delete.next.next\r\n self.size -= 1\r\n if self.size < 0:\r\n raise ValueError(\"size fell below 0\")\r\n return deleted_value", "def _remove_node(self, node):\n previous = node.prev\n next_node = node.next\n\n previous.next = next_node\n next_node.prev = previous", "def test_delete_node_empty_returns_none(bst_empty):\n assert bst_empty.delete(5) is None", "def erase_node(node: Node):\n graph = node.graph\n node_id = node.id\n\n inputs = list(graph.in_edges(node_id, data=True))\n outputs = list(graph.out_edges(node_id, data=True))\n\n assert not (len(inputs) > 1 and len(outputs) > 1)\n\n if len(outputs) == 0 and len(inputs) != 0:\n for input, _, attrs in inputs:\n if Node(graph, node_id).has_and_set('is_output'):\n if graph.node[input]['kind'] == 'op':\n data_nodes = [u for u, v in graph.in_edges(input)]\n for data in data_nodes:\n graph.node[data]['is_output'] = graph.node[node_id]['is_output']\n else:\n graph.node[input]['is_output'] = graph.node[node_id]['is_output']\n\n if len(outputs) == 0 or len(inputs) == 0:\n graph.remove_node(node_id)\n return\n\n if len(outputs) == 1:\n output = outputs[0][1]\n for src, noop, attrs in inputs:\n graph.remove_edge(src, noop)\n graph.add_edge(src, output, **attrs)\n graph.remove_node(node_id)\n return\n\n if len(inputs) == 1:\n input = inputs[0][0]\n for noop, dst, attrs in outputs:\n graph.remove_edge(noop, dst)\n graph.add_edge(input, dst, **attrs)\n graph.remove_node(node_id)\n return", "def remove(self, node):\n 
curr, prev = self.find(node, inc_prev=True)\n if curr:\n self._remove(curr, prev)", "def remove(self, searchitem):\n to_remove = self.search_node(searchitem) # Result of search\n if to_remove: # If the search returned something\n return to_remove.remove_node()", "def get_node(\n node_id: int, graph: ControlDependenceGraph | CFG\n ) -> ProgramGraphNode | None:\n for node in graph.nodes:\n if node.index == node_id:\n return node\n return None", "def del_node (self, node):\n try:\n if isinstance(node, Node):\n node = node.id\n elif isinstance(node, Port):\n node = node.node.id\n self.network.remove_node(node)\n return True\n except NetworkXError:\n # There was no node in the graph\n return False", "def remove_node_from_graph(ugraph, node):\n neighbors = ugraph[node]\n for neighbor in neighbors:\n ugraph[neighbor].remove(node)\n ugraph.pop(node)\n return ugraph", "def dig(node, *subElements):\n if not node:\n return None\n for name in subElements:\n nextNode = None\n for child in node.childNodes:\n if child.nodeType == child.ELEMENT_NODE and child.nodeName == name:\n nextNode = child\n break\n if nextNode:\n node = nextNode\n else:\n return None\n return node", "def get_node(self, key: str) -> Optional[Node]:", "def remove_item(self, item):\n node = self.find(item)\n if node:\n self.delete(node)\n return node.item\n else:\n return None", "def inorder_visit_v3(b: Optional[BTNode],\n visit: Callable[[BTNode], Any]) -> None:\n if b is None or b.data is None:\n pass\n else:\n inorder_visit_v3(b.left, visit) if b.left is not None else None\n visit(b)\n inorder_visit_v3(b.right, visit) if b.right is not None else None", "def upsert_node(self, node: Node, echo: bool = True) -> typing.Optional[\n Node]:\n pass", "def _delete_node(self, node):\n predecessor = node._prev\n successor = node._next\n predecessor._next = successor\n successor._prev = predecessor\n self._size -= 1\n element = node._element # record deleted element\n node._prev = node._next = node._element = None # deprecate node\n return element # return deleted element", "def unparse(node: ast.AST | None, code: str = '') -> str | None:\n if node is None:\n return None\n elif isinstance(node, str):\n return node\n return _UnparseVisitor(code).visit(node)", "def find_node(node, v):\n while node.value != v:\n node = node.right\n return node", "def remove(self, item):\n \"\"\"\n :type item: Node()\n :rtype None\n \"\"\"\n if self.head.getData() == item:\n self.head = self.head.getNext()\n return\n\n prev = curr = self.head\n while curr: \n if curr.getData() == item:\n prev.setNext(curr.getNext())\n break\n prev = curr\n curr = curr.getNext()", "def remove_node(self, node):\n # if the node is a part of the graph\n if node.get_name() in self.get_node_names():\n for edge in node.get_incident_edges(): # for every edge incident to the input node\n other_node = edge.get_other_node(node.get_name()) # get the other incident node object\n if other_node.get_name() in self.get_node_names(): # if the other node is a part of the graph\n self.remove_edge(tuple((node, other_node))) # remove the edge\n self.set_nodeset(\n set({\n vertex\n for vertex in self.get_nodeset()\n if not vertex.get_name().__eq__(node.get_name())\n })\n ) # remove the node from the graph's nodeset", "def remove_node(self, node):\n # remove the first Node \n if node == self.head:\n return self.remove_first()\n # remove the last Node\n elif node == self.tail:\n \n return self.remove_last()\n # set the skip back pointers after removing the Node and set the\n # preview Node to point on the 
next one(skip the removing node)\n if node.next != self.tail:\n if node.skip_back is not None:\n node.next.next.skip_back = node.skip_back.next\n else:\n node.next.next.skip_back = self.head\n if node.skip_back is not None:\n node.next.skip_back = node.skip_back\n node.skip_back.next.next = node.next\n else:\n self.head.next = node.next\n node.next.skip_back = None\n # disconnect the Node from the list\n node.next = None\n return node.data", "def __get_visited_node(self, node_name: str) -> Visit:\r\n # Checks the priority queue\r\n for visited_node in self.__priority_queue:\r\n if visited_node.node_name == node_name:\r\n return visited_node\r\n else:\r\n return next((visited_node for visited_node in self.__fully_visited if visited_node.node_name == node_name), None)", "def del_node(node, delnum):\n pass", "def remove_node(self, id):\r\n\t\tif id in self._nodes:\r\n\t\t\tnode = self._nodes[id]\r\n\t\t\tedges = node.edges()\r\n\t\t\t# ugly can maybe fix it up with sets\r\n\t\t\tfor edge in edges:\r\n\t\t\t\tlabel = edge.label\r\n\t\t\t\tdel edge.start_node._edges[label]\r\n\t\t\t\tdel edge.end_node._edges[label]\r\n\t\t\t\tdel self._edges[edge.id]\r\n\t\t\tdel self._nodes[id]\r\n\t\telse:\r\n\t\t\t# return a real exception someday\r\n\t\t\tprint('Error: Cannot remove node since id does not exist')", "def getOptionalTag(node, tag, option=\"\"):\n try:\n return getTag(node, tag)\n except TagError:\n return option", "def getNodeByID(self, node_id: int) -> Union[Node, None]:\n for node in self.nodes:\n if node.id == node_id:\n return node\n\n return None", "def __remove_node(self, target_node: _AVLTreeNode) -> None: \n\n if target_node is self.__root:\n self.__root = self.__get_subtree(target_node)\n if self.__root is not None:\n self.__root.parent = None\n else:\n new_child_node = self.__get_subtree(target_node)\n if new_child_node is not None: \n new_child_node.parent = target_node.parent\n\n if target_node is target_node.parent.left:\n target_node.parent.left = new_child_node\n else:\n target_node.parent.right = new_child_node", "def remove_node(self, node_id: int) -> bool:\r\n # check if key exists in dictionary by checking if get() returned default value\r\n if len(self.Edges[node_id])!=0:\r\n self.edgeSize -=len(self.Edges[node_id])\r\n del self.Edges[node_id]\r\n del self.Edges_In[node_id]\r\n #should do iteration on every node and check if this node_id is also neighbor there\r\n for key, value in self.Edges.items():\r\n if node_id in value:\r\n self.edgeSize -= 1\r\n del value[node_id]\r\n del self.Nodes[node_id]\r\n self.mc +=1\r\n return True\r\n raise NotImplementedError", "def delete_node(self,node,data):\n\n # Check if tree is empty.\n if node is None:\n return None\n\n # searching key into BST.\n if data < node.data:\n node.left = self.delete_node(node.left, data)\n elif data > node.data:\n node.right = self.delete_node(node.right, data)\n else: # reach to the node that need to delete from BST.\n if node.left is None and node.right is None:\n del node\n if node.left == None:\n temp = node.right\n del node\n return temp\n elif node.right == None:\n temp = node.left\n del node\n return temp\n\n return node", "def remove(self, value):\n node = self.first()\n # case 1 : in case of empty list, do nothing and return None\n if node is None:\n return None\n # case 2 : list has at least one element and node to be removed is the first element\n if node.value() == value:\n self.__head = node.next()\n self.__length -= 1\n node.set_next(None)\n return node\n # case 3 : list has at least one 
element and node to be removed is not the first element\n previous = node\n node = node.next()\n while node is not None:\n if node.value() == value:\n previous.set_next(node.next())\n self.__length -= 1\n node.set_next(None)\n return node\n else:\n node = node.next()\n return None\n\n ##############", "def default_visit(self, node):\n pass", "def remove_node(self, node):\n self.nodes.remove(node)\n node.close()", "def delete_node(name: str, value: str) -> None:\n global _graph\n\n if _graph is None:\n print('\\ndelete_node(): Error: graph has not been initialized or opened.\\n\\n')\n return\n\n lname = str(name)\n lvalue = str(value)\n\n if lname == '' or lvalue == '' or lname == 'nan' or lvalue == 'nan':\n return\n\n node = read_node(name=lname, value=lvalue)\n if node is None:\n return\n\n _graph.delete(node)\n return", "def delete_node(self, key):\n cur_node = self.head\n if cur_node and cur_node.data == key:\n self.head = cur_node.next\n cur_node = None\n return\n\n prev = None\n while cur_node and cur_node.data != key:\n prev = cur_node\n cur_node = cur_node.next\n\n if cur_node is None:\n return\n\n prev.next = cur_node.next\n cur_node = None", "def remove_node(self,node):\n if node.left.node.right: #Node has two children.\n #Find its in order successor\n successor = node.right\n while successor.left:\n successor = successor.left\n #copy the node\n node.key = successor.key\n node.value = successor.value\n #remove the successor\n self.remove_node(successor)\n elif node.left: #The node only has a left child.\n self.replace_node(node,node.left)\n elif node.right: #The node only has a right child.\n self.replace_node(node,node.right)\n else:\n self.replace_node(node, None)", "def remove(self,node,verbose=False):\n for label,parent in node.incoming:\n parent.outgoing.remove((label,node))\n for label,child in node.outgoing:\n child.incoming.remove((label,node))\n self.pop(node.nodeid)\n for x,y in copy(self.undirected):\n if x == node or y == node:\n self.undirected.remove((x,y))\n if self.root == node:\n self.root = None\n if verbose: print('removed',node)", "def remove_node(self, node):\n self.nodes.pop(self.nodes.index(node))\n node1 = node.neighbour1\n node2 = node.neighbour2\n node1.neighbour2 = node2\n node2.neighbour1 = node1", "def _delete(self, node: TreeNode) -> None:\n if node.height == 1: # node has no children\n if node.parent:\n if node.parent.left == node:\n node.parent.left = None\n else:\n node.parent.right = None\n else:\n self.root = None\n new_node = node.parent\n node = None\n elif node.left == None: # node has only right child\n if node.parent:\n if node.parent.left == node:\n node.parent.left = node.right\n else:\n node.parent.right = node.right\n else:\n self.root = node.right\n node.right.parent = node.parent\n new_node = node.parent\n node = None\n elif node.right == None: # node has only left child\n if node.parent:\n if node.parent.left == node:\n node.parent.left = node.left\n else:\n node.parent.right = node.left\n else:\n self.root = node.left\n node.left.parent = node.parent\n new_node = node.parent\n node = None\n else: # node has 2 children\n next_larger = self.successor(node.val)\n node.val = next_larger.val\n return self._delete(next_larger)\n self._inspect_changes(new_node)", "def unknown_visit(self, node):\n pass", "def remove_node(self, node):\n self.nodes[node.name] = node\n self.dirty = True", "def fn(node):\n if not node or node in (p, q): return node\n left, right = fn(node.left), fn(node.right)\n if left and right: return node \n return left or 
right", "def get_valid_node(node):\n try:\n PyNode = pm.PyNode(node)\n except pm.MayaNodeError:\n print('Error: no node named : %s' % node)\n return None\n\n return PyNode", "def deleteNode(self, key):\n\n cur = self.head\n while cur:\n if cur.data == key and cur == self.head:\n if not cur.next:\n cur = None\n self.head = None\n return\n else:\n afterNode = cur.next\n cur.next = None\n afterNode.prev = None\n cur = None\n self.head = afterNode\n return\n elif cur.data == key:\n if cur.next:\n afterNode = cur.next\n prev = cur.prev\n prev.next = afterNode\n afterNode.prev = prev\n cur.next = None\n cur.prev = None\n cur = None\n return\n else:\n prev = cur.prev\n prev.next = None\n cur.prev = None\n cur = None\n return\n cur = cur.next", "def generic_visit(self, node):\n\n visit_method_name = 'visit_' + node.__class__.__name__\n if hasattr(self, visit_method_name):\n method = getattr(self, visit_method_name)\n method(node)\n\n return node", "def remove_node(self, node_address):\n node = self.find_node(node_address[0], node_address[1])\n if node is None:\n raise ValueError\n else:\n for child_node in node.get_subtree():\n child_node.set_dead()\n\n node.remove_from_parent()\n node.parent = None\n node.set_dead()\n # self.nodes.remove(node)\n pass", "def retractnodefindings(self, node_p):\n node_p = self.getnodenamed(node_p) # Verify pointer.\n\n # (node_bn* node)\n cnetica.RetractNodeFindings_bn.argtypes = [c_void_p]\n cnetica.RetractNodeFindings_bn.restype = None\n cnetica.RetractNodeFindings_bn(node_p)", "def visit_required(\n parent: \"CSTNode\", fieldname: str, node: CSTNodeT, visitor: \"CSTVisitorT\"\n) -> CSTNodeT:\n visitor.on_visit_attribute(parent, fieldname)\n result = node.visit(visitor)\n if isinstance(result, RemovalSentinel):\n raise TypeError(\n f\"We got a RemovalSentinel while visiting a {type(node).__name__}. This \"\n + \"node's parent does not allow it to be removed.\"\n )\n elif isinstance(result, FlattenSentinel):\n raise TypeError(\n f\"We got a FlattenSentinel while visiting a {type(node).__name__}. This \"\n + \"node's parent does not allow for it to be it to be replaced with a \"\n + \"sequence.\"\n )\n\n visitor.on_leave_attribute(parent, fieldname)\n return result", "def remove_from_node(node, child_to_remove):\n\n children_names = list(History.children_names(node))\n try:\n child_to_remove_index = children_names.index(child_to_remove)\n result = History.children(History.children(node)[child_to_remove_index]), True\n del History.children(node)[child_to_remove_index]\n return result\n except ValueError:\n children_call_result = [History.remove_from_node(child_node, child_to_remove)\n for child_node in History.children(node)]\n return reduce(lambda l, r: l if snd(l) else r,\n children_call_result, ([], False))", "def postorder_visit_v3(b: Union[BTNode],\n visit: Callable[[BTNode], Any]) -> None:\n if b is None or b.data is None:\n pass\n else:\n postorder_visit_v3(b.left, visit) if b.left is not None else None\n postorder_visit_v3(b.right, visit) if b.right is not None else None\n visit(b)", "def delete_node(self, node):\n return self.manager.delete_node(self, node)", "def removeNode(self, node__to__remove): # Class O(nlog2n)\r\n # This is clear the worst function. It goes to different if statements before\r\n # start the 'real' computation to replace the value\r\n if node__to__remove > self.length():\r\n raise ValueError(\"Invalid position. 
The LinkedList has length %s\" % self.length())\r\n elif node__to__remove == 1:\r\n if self.length() == 1:\r\n raise ValueError(\"The LinkedList has only one node (the head)\")\r\n if self.length() == 2:\r\n self.head = Node(self.head.next)\r\n else:\r\n self.head = Node(self.head.next, self.head.next.next)\r\n elif (self.length() - 1) == node__to__remove:\r\n h = self.head\r\n count = 1\r\n while count != (node__to__remove - 1):\r\n h = h.next\r\n count += 1\r\n h.next = Node(h.next.next)\r\n elif self.length() == node__to__remove:\r\n h = self.head\r\n count = 2\r\n while count != (node__to__remove - 1):\r\n h = h.next\r\n count += 1\r\n h.next = Node(h.next)\r\n else:\r\n h = self.head\r\n count = 2\r\n while count != node__to__remove:\r\n h = h.next\r\n count += 1\r\n h.next = Node(h.next.next, h.next.next.next)", "def _targetof(node):\r\n if node is None: return None\r\n return node.target", "def dereference(self, peer, and_router=False):\n if peer == self.node:\n return\n\n self.peers.remove(peer)\n if and_router != True:\n return\n\n router = filter(lambda x: x.node == peer, self.routers)\n if not any(router): return\n self.routers.remove(router[0])", "def visit(self, node):\n method_name = 'visit_' + type(node).__name__\n visit_method = getattr(self, method_name, self.generic_visit)\n return visit_method(node)", "def test_RestrictingNodeTransformer__visit_Is__1():\n assert restricted_eval('None is None') is True", "def delete_on_node(\n node_ip: str, path: str, return_future: bool = False\n) -> Union[bool, ray.ObjectRef]:\n\n node_id = _get_node_id_from_node_ip(node_ip)\n\n delete_task = _remote_delete_path.options(num_cpus=0, **_force_on_node(node_id))\n future = delete_task.remote(path)\n\n if return_future:\n return future\n\n return ray.get(future)", "def delete_node(self, node: 'GraphNode'):\n\n self.operator.delete_node(node)", "def naive(head: ListNode) -> ListNode:\n if head is None or head.next is None: # Not possible to have a cycle\n return None\n seen = {} # A hash-set would work better\n curr = head\n while curr is not None:\n if curr in seen:\n return curr\n else:\n seen[curr] = True\n curr = curr.next\n return None", "def Delete(root, node):\n target = root.FindLeaf(node)\n if target == None:\n # print 'no result'\n print(\"no result\")\n return root\n target.leaves.remove(node)\n target.CondenseTree()\n root = root.CondenseRoot()\n return root", "def del_middle_node(node):\n if node == None or node.next == None:\n return\n\n node.data = node.next.data\n node.next = node.next.next", "def remove(self, key):\n if self.head.data == key: # checking first corner case of first node to be removed\n self.head = self.head.next\n return\n\n elif self.head is None: # checking second corner case of linked list being empty\n return\n\n else: # otherwise maintain two pointers and remove the required node\n curr_node = self.head.next\n prev_node = self.head\n while prev_node.next is not None:\n if curr_node.data == key:\n prev_node.next = curr_node.next\n return\n\n return", "def _(obj: Not, visitor: BooleanExpressionVisitor[T]) -> T:\n child_result: T = visit(obj.child, visitor=visitor)\n return visitor.visit_not(child_result=child_result)" ]
[ "0.6102222", "0.5962804", "0.583166", "0.5687837", "0.56851536", "0.56654507", "0.55203897", "0.54365003", "0.54352176", "0.5417704", "0.53179693", "0.5272227", "0.5247078", "0.52446026", "0.52202773", "0.51991373", "0.51991373", "0.51991373", "0.51991373", "0.51991373", "0.5171593", "0.5169277", "0.5126973", "0.5111011", "0.50832015", "0.5069344", "0.50600636", "0.50272727", "0.50196636", "0.50189376", "0.5015346", "0.50141203", "0.5010995", "0.4989621", "0.4987462", "0.49819672", "0.49790758", "0.49702132", "0.49698448", "0.49644527", "0.4936323", "0.49332178", "0.49192804", "0.49190742", "0.49092406", "0.48965418", "0.48738337", "0.4869262", "0.48666555", "0.4840359", "0.4824243", "0.4809383", "0.47970128", "0.47816837", "0.47787407", "0.47778988", "0.47776687", "0.47738653", "0.47685748", "0.476692", "0.47663754", "0.4763537", "0.47552133", "0.4751746", "0.47499338", "0.47400308", "0.47387347", "0.47371262", "0.4732017", "0.4715511", "0.4711953", "0.46982315", "0.46974167", "0.4690003", "0.46852636", "0.46835807", "0.4677338", "0.46763733", "0.4675693", "0.4674654", "0.46654746", "0.46647647", "0.4655727", "0.4652574", "0.46521032", "0.46496618", "0.46489698", "0.4648479", "0.46477792", "0.46447262", "0.46340978", "0.46214414", "0.46187562", "0.46176025", "0.4617053", "0.46131873", "0.4607626", "0.45997956", "0.45980552", "0.4589918" ]
0.58078337
3
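For readers skimming the rows above and below: the `visit_optional` and `visit_sentinel` documents implement the contracts their queries describe — a removed optional child collapses to None, and a removed sentinel-capable child collapses back to the default sentinel. What follows is a minimal, self-contained sketch of just those contracts, not part of the dataset: all names are illustrative stand-ins for the real CST machinery, and the attribute callbacks and FlattenSentinel checks from the actual documents are omitted.

from enum import Enum, auto
from typing import Callable, Optional, Union


class RemovalSentinel(Enum):
    """What a visitor returns to ask for its node to be removed."""
    REMOVE = auto()


class MaybeSentinel(Enum):
    """Placeholder meaning 'no explicit value; use the syntactic default'."""
    DEFAULT = auto()


class Node:
    def __init__(self, name: str) -> None:
        self.name = name

    def visit(self, visitor: Callable[["Node"], Union["Node", RemovalSentinel]]):
        return visitor(self)


def visit_optional(node: Optional[Node], visitor) -> Optional[Node]:
    # An absent child stays absent; a removed child collapses to None.
    if node is None:
        return None
    result = node.visit(visitor)
    return None if isinstance(result, RemovalSentinel) else result


def visit_sentinel(node: Union[Node, MaybeSentinel], visitor) -> Union[Node, MaybeSentinel]:
    # A sentinel child stays a sentinel; a removed child collapses to the default.
    if isinstance(node, MaybeSentinel):
        return MaybeSentinel.DEFAULT
    result = node.visit(visitor)
    return MaybeSentinel.DEFAULT if isinstance(result, RemovalSentinel) else result


def remove_everything(node: Node) -> RemovalSentinel:
    return RemovalSentinel.REMOVE


print(visit_optional(None, remove_everything))           # None
print(visit_optional(Node("child"), remove_everything))  # None
print(visit_sentinel(Node("comma"), remove_everything))  # MaybeSentinel.DEFAULT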
Given a node that can be a real value or a sentinel value, visits the node with `visitor` if it is real. If the node is removed, returns MaybeSentinel.
def visit_sentinel(
    parent: "CSTNode",
    fieldname: str,
    node: Union[CSTNodeT, MaybeSentinel],
    visitor: "CSTVisitorT",
) -> Union[CSTNodeT, MaybeSentinel]:
    if isinstance(node, MaybeSentinel):
        visitor.on_visit_attribute(parent, fieldname)
        visitor.on_leave_attribute(parent, fieldname)
        return MaybeSentinel.DEFAULT
    visitor.on_visit_attribute(parent, fieldname)
    result = node.visit(visitor)
    if isinstance(result, FlattenSentinel):
        raise TypeError(
            f"We got a FlattenSentinel while visiting a {type(node).__name__}. This "
            + "node's parent does not allow for it to be replaced with a sequence."
        )
    visitor.on_leave_attribute(parent, fieldname)
    return MaybeSentinel.DEFAULT if isinstance(result, RemovalSentinel) else result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def visit_optional(\n parent: \"CSTNode\", fieldname: str, node: Optional[CSTNodeT], visitor: \"CSTVisitorT\"\n) -> Optional[CSTNodeT]:\n if node is None:\n visitor.on_visit_attribute(parent, fieldname)\n visitor.on_leave_attribute(parent, fieldname)\n return None\n visitor.on_visit_attribute(parent, fieldname)\n result = node.visit(visitor)\n if isinstance(result, FlattenSentinel):\n raise TypeError(\n f\"We got a FlattenSentinel while visiting a {type(node).__name__}. This \"\n + \"node's parent does not allow for it to be it to be replaced with a \"\n + \"sequence.\"\n )\n visitor.on_leave_attribute(parent, fieldname)\n return None if isinstance(result, RemovalSentinel) else result", "def visit_required(\n parent: \"CSTNode\", fieldname: str, node: CSTNodeT, visitor: \"CSTVisitorT\"\n) -> CSTNodeT:\n visitor.on_visit_attribute(parent, fieldname)\n result = node.visit(visitor)\n if isinstance(result, RemovalSentinel):\n raise TypeError(\n f\"We got a RemovalSentinel while visiting a {type(node).__name__}. This \"\n + \"node's parent does not allow it to be removed.\"\n )\n elif isinstance(result, FlattenSentinel):\n raise TypeError(\n f\"We got a FlattenSentinel while visiting a {type(node).__name__}. This \"\n + \"node's parent does not allow for it to be it to be replaced with a \"\n + \"sequence.\"\n )\n\n visitor.on_leave_attribute(parent, fieldname)\n return result", "def find_node(node, v):\n while node.value != v:\n node = node.right\n return node", "def _visit(self, visit: Callable[[], Any], expected: Any) -> None:\n if isinstance(expected, type) and issubclass(expected, Exception):\n with raises(expected):\n visit()\n else:\n result = visit()\n if expected is math.nan:\n # Special case since nan != nan.\n assert math.isnan(result)\n else:\n assert result == expected\n # We also check types in particular because instances of `Node` are very\n # good at mimicking their underlying type's behavior, and it is easy to\n # fail to notice that the result contains nodes when it should not.\n self._check_is_same_type(result, expected)", "def _internalRemove(self, node):\n\t\tif not node:\n\t\t\treturn None\n\n\t\t#Case 1 - node is a leaf\n\t\tif (not node.lchild() and not node.rchild()):\n\t\t\tprint str(node.value()) + \": 1\"\n\t\t\tif (node is self._root):\n\t\t\t\tself._root = None\n\t\t\t\treturn node.value()\n\t\t\tif node.islchild():\n\t\t\t\tnode.parent().setlchild()\n\t\t\telse:\n\t\t\t\tnode.parent().setrchild()\n\t\t\treturn node.value()\n\n\t\t#Case 2 - node has only 1 child\n\t\tif (bool(node.lchild()) != bool(node.rchild())): #basically an XOR\n\t\t\tprint str(node.value()) + \": 2\"\n\t\t\tif node.lchild():\n\t\t\t\tif (node is self._root):\n\t\t\t\t\tself._root = node.lchild()\n\t\t\t\t\treturn node.value()\n\t\t\t\telse:\n\t\t\t\t\tif node.islchild():\n\t\t\t\t\t\tnode.parent().setlchild(node.lchild())\n\t\t\t\t\telse:\n\t\t\t\t\t\tnode.parent().setrchild(node.lchild())\n\t\t\t\t\treturn node.value()\n\t\t\telse:\n\t\t\t\tif (node is self._root):\n\t\t\t\t\tself._root = node.rchild()\n\t\t\t\t\treturn node.value()\n\t\t\t\telse:\n\t\t\t\t\tif node.islchild():\n\t\t\t\t\t\tnode.parent().setlchild(node.rchild())\n\t\t\t\t\telse:\n\t\t\t\t\t\tnode.parent().setrchild(node.rchild())\n\t\t\t\t\treturn node.value()\n\n\t\t#case 3 - node has 2 children\n\t\t#find minimum element in right subtree, switch data\n\t\t#delete the node that had the minimum element\n\t\tif (node.lchild() and node.rchild()):\n\t\t\tprint str(node.value()) + \": 3\"\n\t\t\tminele = 
node.rchild()\n\t\t\twhile minele.lchild():\n\t\t\t\tminele = minele.lchild()\n\t\t\ttemp = node.value()\n\t\t\tnode.setvalue(minele.value())\n\t\t\tminele.setvalue(temp)\n\t\t\tself._internalRemove(minele)\n\t\t\treturn node.value()", "def default_visit(self, node):\n raise ValueError('Unhandled Node %s.' % node)", "def default_visit(self, node):\n raise ValueError('Unhandled Node %s.' % node)", "def or_none(cls, node):\n return node if isinstance(node, cls) else None", "def test_RestrictingNodeTransformer__visit_Is__1():\n assert restricted_eval('None is None') is True", "def visit_none_type(self, left: NoneType) -> T:", "def _clean_graph_visit(self, node, visited):\n visited[node] = True\n\n while True:\n rp_node = None\n rp_id = -1\n for n_id, n in enumerate(node.get_children()):\n if n.get_type() == CFGNodeType.END_IF:\n rp_node = n\n rp_id = n_id\n break\n\n # end node points to only one child,\n # so replace it\n if rp_node is not None and rp_node.get_children() != []:\n node.get_children()[rp_id] = rp_node.get_children()[0]\n\n # END-IF can be replaced by another, so continue until there's none\n if rp_node == None:\n break\n\n if node.get_type() == CFGNodeType.PSEUDO:\n self._clean_graph_visit(node.get_refnode(), visited)\n\n for child in node.get_children():\n if child not in visited:\n self._clean_graph_visit(child, visited)", "def naive(head: ListNode) -> ListNode:\n if head is None or head.next is None: # Not possible to have a cycle\n return None\n seen = {} # A hash-set would work better\n curr = head\n while curr is not None:\n if curr in seen:\n return curr\n else:\n seen[curr] = True\n curr = curr.next\n return None", "def unknown_visit(self, node):\n pass", "def detect_loop(head):\n\n seen = set()\n\n current_node = head\n while current_node.next is not None:\n if current_node in seen:\n return current_node\n seen.add(current_node)\n current_node = current_node.next\n return None", "def _(obj: Not, visitor: BooleanExpressionVisitor[T]) -> T:\n child_result: T = visit(obj.child, visitor=visitor)\n return visitor.visit_not(child_result=child_result)", "def _purgeNode(self, data):\n\t\tq = c3.Queue()\n\t\tq.enqueue(self._root)\n\t\twhile q.peek():\n\t\t\tnode = q.dequeue()._data\n\t\t\tif node.value() == data:\n\t\t\t\treturn node\n\t\t\tif node.lchild():\n\t\t\t\tq.enqueue(node.lchild())\n\t\t\tif node.rchild():\n\t\t\t\tq.enqueue(node.rchild())\n\t\treturn None", "def test_RestrictingNodeTransformer__visit_IsNot__1():\n assert restricted_eval('2 is not None') is True", "def find_live_node(self, sender):\n if self.find_node(sender[0], sender[1]) is not None:\n return None\n node_list: list[GraphNode] = [self.root]\n while node_list:\n for node in node_list:\n if len(node.children) < 2: # is this syntax right?!\n return node\n node_list += node.children\n node_list.remove(node)\n pass", "def get_visitor(self, node: Node) -> \"t.Optional[VisitCallable]\":\n return getattr(self, f\"visit_{type(node).__name__}\", None)", "def remove(self, val: Generic[T]) -> None:\n def remove_node(node: Node) -> Node: #recursive function\n if node is self.node:\n return node\n if node.val == val: #removes all nodes with value val\n next_node = node.next\n prev_node = node.prev\n\n prev_node.next = next_node\n next_node.prev = prev_node\n remove_node(node.next)\n\n remove_node(self.node.next)", "def remove_node(self, value):\n node = self.head\n\n while node:\n if self.head.value == value:\n self.head = self.head.next\n return\n if node.next.value == value:\n node.next = node.next.next\n 
return\n node = node.next", "def find_node(self, value):\n cur = self.first\n while cur:\n if cur.value == value:\n return cur\n cur = cur.next\n return None", "def default_visit(self, node):\n pass", "def visit_IfExp(self, node):\n if node and not config.mutated:\n return self.visit_node(node)\n elif node and config.mutated and config.recovering:\n return self.recover_node(node)\n return node", "def get_visitor(self, node):\r\n method = 'visit_' + node.__class__.__name__\r\n return getattr(self, method, None)", "def invisible_visit(self, node: Node) -> None:\n pass", "def remove_first(self):\n if self.is_empty():\n raise self.NoSuchNodeException()\n\n tmp_val = self.head.data\n self.head = self.head.next_node\n self.list_size -= 1\n return tmp_val", "def delete(self, val):\n # We will be searching the bianry tree for the value.\n del_node = self.search(val) # Node to be deleted.\n if del_node is None: # This means that the value wasn't found.\n return False # Deletion not done\n self.no_of_Nodes -= 1\n while True:\n # Case 1: Leaf Node is being deleted (the simplest case).\n\n if del_node.is_leaf_node() is True:\n if del_node.node_type() == \"Left\":\n del_node.parent.left = None\n elif del_node.node_type == \"Right\": \n del_node.parent.left = None\n else:\n print(\"Weird bug in delete, God help us!\")\n del(del_node)\n return True # Deletion done\n # Case 2: Node with one child\n elif del_node.left is None or del_node.right is None:\n store_node = None\n if del_node.left is None:\n store_node = del_node.right\n else:\n store_node = del_node.left\n \n if del_node.node_type() == \"Root\":\n self.rootNode = store_node\n elif del_node.node_type() == \"Left\":\n del_node.parent.left = store_node\n elif del_node.node_type() == \"Right\":\n del_node.parent.right = store_node\n else:\n print(\"Weird bug 2 in delete, God help us!\")\n del(del_node)\n return True\n # Case 3: Node with 2 children\n elif del_node.left is not None and del_node.right is not None:\n inorder_suc = del_node.right\n inorder_suc = self.min_node(inorder_suc) # Finding inorder successor.\n del_node.val = inorder_suc.val # Replacing value of node to be deleted with successor's.\n del_node = inorder_suc # Deleting successor node\n else:\n print(\"Weird bug 3 in delete, God help us!\")\n print('Hello')", "def test_expected_none(transformer, event):\n\n nodes = transformer.transform(event)\n\n assert nodes is None", "def depart(visitor: DocxTranslator, node: Node):\n assert isinstance(visitor, DocxTranslator)\n assert isinstance(node, Node)\n\n visitor.p = None", "def getvalueofnode(node):\n return node.text if node is not None else None", "def getvalueofnode(node):\n return node.text if node is not None else None", "def next_node(self):\r\n to_visit = list(self.to_visit())\r\n if not to_visit:\r\n return None\r\n next_up = min(to_visit, key=lambda n: self.table[n][\"dist\"])\r\n connected = self.table[next_up][\"dist\"] != float(\"inf\")\r\n return next_up if connected else None", "def generic_visit(self, node: ast.AST) -> None:", "def find_node(self, value):\n for (fun, node) in self.__root.__fast_find:\n if fun(value):\n return node\n return None", "def getEquivNode(self, root, seen):\n\n if root.value in seen and seen[root.value].neighbors == root.neighbors:\n return seen[root.value]\n return root", "def getvalueofnode(node):\r\n return node.text if node is not None else None", "def remove(self,valor):\n\n if self.size==0:\n return False\n else:\n current=self.first\n try:\n while current.next.valor!=valor:\n 
current=current.next\n deleted_node=current.next\n current.next=deleted_node.next\n except AttributeError:\n return False\n self.size-=1\n return deleted_node", "def walk_to_root(self):\n\n my_node = self\n while not my_node.is_root:\n if my_node.descendent in (-1, None):\n break\n my_node = my_node.descendent\n return my_node", "def _analyse_stmt_Return(self, statement: ast.Return, *, next: CFNode) -> CFNode:\n if statement.value is None:\n nodes = dict(next=self._context[_LEAVE])\n else:\n nodes = dict(next=self._context[_RETURN])\n value_is_constant, _ = self._expression_as_constant(statement.value)\n if not value_is_constant:\n nodes.update(error=self._raise)\n return self._ast_node(statement, **nodes)", "def _find(self, val, cur_node):\n if val == cur_node.data:\n return cur_node\n elif val > cur_node.data:\n if not cur_node.right:\n return None\n return self._find(val, cur_node.right)\n elif val < cur_node.data:\n if not cur_node.left:\n return None\n return self._find(val, cur_node.left)", "def visit_If(self, node):\n if node and not config.mutated:\n for child in ast.iter_child_nodes(node):\n config.parent_dict[child] = node\n\n if self.operator[1] is StatementDeletion:\n for anode in node.body:\n if anode.__class__ in [ast.Raise, ast.Continue, ast.Break, ast.Assign, ast.AugAssign, ast.Call] and anode not in config.nodes_to_remove:\n config.nodes_to_remove.add(anode)\n elif anode.__class__ in [ast.Expr] and anode not in config.nodes_to_remove:\n config.nodes_to_potential.add(anode)\n node = self.mutate_single_node(node, self.operator)\n else:\n node = self.mutate_single_node(node, self.operator)\n if node and not config.mutated:\n self.dfs_visit(node)\n elif node and config.mutated and config.recovering:\n return self.recover_node(node)\n return node", "def getvalueofnode(node):\r\n if node is not None:\r\n return node.text\r\n else:\r\n None", "def find_first_node_from_here_by_rule(start_node, select):\n try:\n return next(find_nodes_from_here_by_rule(start_node, select))\n except StopIteration:\n return None", "def solution_alternate(head: ListNode) -> ListNode:\n DUMMY = ListNode(-1)\n curr = head\n while curr:\n if curr.next == DUMMY: # If the node is already pointing to the dummy, then it indicates start of cycle\n return curr\n\n next_copy = curr.next # Save link to the next node that is about to be re-pointed to the dummy\n curr.next = DUMMY # Re-point the next node to the dummy node\n curr = next_copy # Using the saved link, update curr\n return None", "def ignore_visit_interslide(self, node):\n raise SkipNode", "def visit_Node(self, node):\n pass", "def remove_by_value(self, data):\n pre_node = None\n for n in self:\n if n.data == data:\n if pre_node is None:\n self.pop()\n else:\n pre_node.next = n.next\n break\n pre_node = n\n else:\n raise ValueError(f'value [{data}] not found in linked list')", "def visit(self, node):", "def visit(self, node):", "def successor(self) -> Union[\"Node\", None]:\n if self.right is not None: # case 1: the node has a right child\n return self.right.min()\n\n else: # case 2: the node does not have a right child\n current = self\n while current.parent is not None: # traverse up\n if current == current.parent.left:\n return current.parent\n else:\n current = current.parent\n\n return None # the root is reached, so no successor exists", "def _dummy_node(self) -> CFNode:\n node = CFNode()\n self._graph.add_node(node)\n return node", "def remove(self, val):\n current_node = self.head\n previous_node = None\n\n while current_node:\n if 
current_node.val == val:\n if previous_node:\n previous_node.next = current_node.next\n else:\n self.head = current_node.next\n\n previous_node = current_node\n current_node = current_node.next", "def remove(self, value):\n node = self.first()\n # case 1 : in case of empty list, do nothing and return None\n if node is None:\n return None\n # case 2 : list has at least one element and node to be removed is the first element\n if node.value() == value:\n self.__head = node.next()\n self.__length -= 1\n node.set_next(None)\n return node\n # case 3 : list has at least one element and node to be removed is not the first element\n previous = node\n node = node.next()\n while node is not None:\n if node.value() == value:\n previous.set_next(node.next())\n self.__length -= 1\n node.set_next(None)\n return node\n else:\n node = node.next()\n return None\n\n ##############", "def delete(self, value):\n # Iterating to node that has value\n node = self.head\n last_node = None\n while node is not None and node.value != value:\n last_node = node\n node = node.next_\n\n # Check if the node has been found\n if node is None:\n return\n\n # Checking whether head matched\n if last_node is None:\n self.head = node.next_\n return\n\n # Deleting node\n last_node.next_ = node.next_", "def unknown_visit(self, node: Node) -> None:\n\n logger.warning(\"Handouts hit unexpected node: %s\", node)\n raise SkipNode", "def depart(visitor: DocxTranslator, node: Node):\n assert isinstance(visitor, DocxTranslator)\n assert isinstance(node, Node)", "def on_leave(\n self, original_node: CSTNodeT, updated_node: CSTNodeT\n ) -> Union[CSTNodeT, RemovalSentinel, FlattenSentinel[CSTNodeT]]:\n leave_func = getattr(self, f\"leave_{type(original_node).__name__}\", None)\n if leave_func is not None:\n updated_node = leave_func(original_node, updated_node)\n\n return updated_node", "def test_null_resets_on_normal_value(self):\n\n class Node:\n my_metric = Metric(Int64)\n\n node = Node()\n my_metric = get_metric_object(node, 'my_metric')\n node.my_metric = None\n\n self.assertTrue(my_metric.is_null(node))\n self.assertEqual(node.my_metric, None)\n\n exp_value = 5\n node.my_metric = exp_value\n\n self.assertFalse(my_metric.is_null(node))\n self.assertEqual(node.my_metric, exp_value)", "def remove(self, item):\n \"\"\"\n :type item: Node()\n :rtype None\n \"\"\"\n if self.head.getData() == item:\n self.head = self.head.getNext()\n return\n\n prev = curr = self.head\n while curr: \n if curr.getData() == item:\n prev.setNext(curr.getNext())\n break\n prev = curr\n curr = curr.getNext()", "def search(self, val):\n current = self.head\n # import pdb; pdb.set_trace()\n while current is not None:\n if current.data == val:\n return current\n current = current.next_node\n return None", "def find_first_node_by_rule(root_node, select):\n try:\n return next(find_nodes_by_rule(root_node, select))\n except StopIteration:\n return None", "def valf(node: md.Document) -> float:\n try:\n return float(val(node))\n except ValueError:\n return None", "def search_node(self, searchitem):\n if self._element is None:\n return None\n elif self._element == searchitem:\n pass\n elif not self._leftchild and not self._rightchild:\n return None\n elif searchitem < self._element:\n if not self._leftchild:\n return None\n return self._leftchild.search_node(searchitem)\n elif self._element < searchitem:\n if not self._rightchild:\n return None\n return self._rightchild.search_node(searchitem)\n return self", "def next_sibling(node, name):\n while node.nextSibling is not 
None:\n node = node.nextSibling\n if node.nodeType == node.ELEMENT_NODE and node.tagName == name:\n return node\n return None", "def removeNode(node, head, tail) :\n if node is head :\n node = None\n return (None, None)\n if node is tail :\n tail = tail.prev\n tail.next = None\n node.prev = None\n return (head, tail)\n node.prev.next = node.next\n node.next.prev = node.prev\n node.next = None\n node.prev = None\n node = None\n return (head, tail)", "def generic_visit(self, node):\n\n visit_method_name = 'visit_' + node.__class__.__name__\n if hasattr(self, visit_method_name):\n method = getattr(self, visit_method_name)\n method(node)\n\n return node", "def test_null_value(self):\n\n class Node:\n my_metric = Metric(Int64)\n\n node = Node()\n my_metric = get_metric_object(node, 'my_metric')\n node.my_metric = None\n\n self.assertTrue(my_metric.is_null(node))\n self.assertEqual(node.my_metric, None)", "def set_nodevalue(self, node, value, V=\"Value\", Conditional=False):\n\n if self.mReplaceNewline:\n value = re.sub(\"\\n\", \"###newline_escape###\",value)\n\n if Conditional:\n if self.get_nodevalue(node, V) == value: return None\n\n node.set(V, value)\n return node", "def find_successor(self, node): \n current_node = node\n\n if current_node.right != None:\n current_node = current_node.right\n while current_node.get_children() != 0:\n if current_node.left != None:\n current_node = current_node.left\n else:\n current_node = current_node.right\n return current_node\n else:\n return None", "def visit_not(self, child_result: T) -> T:", "def _min_node(node):\n if not node:\n return None\n i = node\n while i.left:\n i = i.left\n return i", "def visit_WhileNode(self, node: WhileNode, symbol_table: SymbolTable) -> None:\n while True:\n if self.visit(node.cond, symbol_table).value == 0:\n break\n else:\n for expr in node.body:\n if expr is not None:\n if isinstance(expr, ReturnNode):\n return expr\n res = self.visit(expr, symbol_table)\n if isinstance(res, ReturnNode):\n return res", "def deleteNode(self, node: ListNode, n: int) -> None:\n while node.val != n:\n node = node.next\n\n if node.val == n:\n node.val = node.next.val\n node.next = node.next.next", "def _search(cls, node, value):\n if node is None:\n return False\n\n if node.value == value:\n return True\n\n return cls._search(node.next_, value)", "def remove(self, value):\n # find node to be removed\n node = self.find(value)\n\n # value does not exist: abort\n if node == None:\n print('Removal failure: Node with value ', value, ' not found')\n return\n\n # value exists: find best substitute candidate\n # node to be removed is a leaf: remove it\n if node.left == None and node.right == None:\n parent = node.parent\n self.updateNodeParentChild(node, None)\n\n # node to be removed has left child: find left child most right node\n elif node.left != None:\n\n # find substitute\n substitute = node.left\n while substitute.right != None:\n substitute = substitute.right\n\n # update node value to substitute value\n node.value = substitute.value\n\n # update substitute's parent child, and this child's parent\n parent = substitute.parent\n if parent == node:\n node.left = substitute.left\n else:\n parent.right = substitute.left\n if substitute.left != None:\n substitute.left.parent = parent\n\n # node to be removed has only right child: find right child most left nd\n else:\n\n # find substitute\n substitute = node.right\n while substitute.left != None:\n substitute = substitute.left\n\n # update node value to substitute value\n node.value = 
substitute.value\n\n # update substitute's parent child, and this child's parent\n parent = substitute.parent\n if parent == node:\n node.right = substitute.right\n else:\n parent.left = substitute.right\n if substitute.right != None:\n substitute.right.parent = parent\n\n # value updated and node removed: rebalance tree\n self.rebalance(parent)", "def visit(self, node):\n method_name = 'visit_' + type(node).__name__\n visit_method = getattr(self, method_name, self.generic_visit)\n return visit_method(node)", "def sibling_extract(extracted_tag, next_tag = \"td\", replacement_value = None):\n try:\n # using find_next to find the sibling with the specified tag\n value = extracted_tag.find_next(next_tag).text\n except:\n value = None\n\n return value", "def front_node(self):\n return self.sentinel.next if self.N != 0 else None", "def binary_search(node, value):\n aux_node = None\n while node is not None and node.value != value:\n if value < node.value:\n aux_node = node.left\n node = aux_node\n else:\n aux_node = node.right\n node = aux_node\n return node if node.value == value else None", "def test_search_returns_none_when_value_notin_tree_left(bst_all_to_left):\n assert bst_all_to_left.search(0) is None", "def simplify_negation_node(parse_str=None, location=None, tokens=None):\n negated = False\n value = None\n # Collapse a sequence of negations into zero or one.\n for t in tokens:\n if t == \"!\":\n negated = not negated\n else:\n # Grab the value from the end of the list of tokens, we should\n # only hit this once, for the final item in the list.\n assert value is None, \"Unexpected additional value\"\n value = t\n if negated:\n # Expression simplified to a negation.\n return NegationNode(value)\n else:\n # Expression simplified to no negation, return the value directly.\n return value", "def removeNode(self, node__to__remove): # Class O(nlog2n)\r\n # This is clear the worst function. It goes to different if statements before\r\n # start the 'real' computation to replace the value\r\n if node__to__remove > self.length():\r\n raise ValueError(\"Invalid position. 
The LinkedList has length %s\" % self.length())\r\n elif node__to__remove == 1:\r\n if self.length() == 1:\r\n raise ValueError(\"The LinkedList has only one node (the head)\")\r\n if self.length() == 2:\r\n self.head = Node(self.head.next)\r\n else:\r\n self.head = Node(self.head.next, self.head.next.next)\r\n elif (self.length() - 1) == node__to__remove:\r\n h = self.head\r\n count = 1\r\n while count != (node__to__remove - 1):\r\n h = h.next\r\n count += 1\r\n h.next = Node(h.next.next)\r\n elif self.length() == node__to__remove:\r\n h = self.head\r\n count = 2\r\n while count != (node__to__remove - 1):\r\n h = h.next\r\n count += 1\r\n h.next = Node(h.next)\r\n else:\r\n h = self.head\r\n count = 2\r\n while count != node__to__remove:\r\n h = h.next\r\n count += 1\r\n h.next = Node(h.next.next, h.next.next.next)", "def del_middle_node(node):\n if node == None or node.next == None:\n return\n\n node.data = node.next.data\n node.next = node.next.next", "def _find_node(self, item):\n # Start with the root node\n node = self.root\n # Loop until we descend past the closest leaf node\n while node is not None:\n # TODO: Check if the given item matches the node's data\n if ...:\n # Return the found node\n return node\n # TODO: Check if the given item is less than the node's data\n elif ...:\n # TODO: Descend to the node's left child\n node = ...\n # TODO: Check if the given item is greater than the node's data\n elif ...:\n # TODO: Descend to the node's right child\n node = ...\n # Not found\n return None", "def test_node_value():\n from linked_list import Node\n node2 = Node(\"Something\")\n assert node2.value == \"Something\" and node2.nxt is None", "def delete_ll_node(node):\n node.val = node.next.val\n node.next = node.next.next", "def remove(self, data):\n\n traverse = self.head\n temp = self.head\n if self.head == None:\n return None\n\n if traverse.data == data:\n self.head = traverse.next\n return\n\n while traverse.next != None:\n\n temp = traverse.next\n if temp.data == data:\n traverse.next = temp.next\n return\n\n traverse = traverse.next", "def leaf_NoneType(self, value, depth, available):\n return \"null\", False", "def remove_value(self, value):\n if self.head is None: \n raise ValueError('Deleting from empty list.')\n node = self.head \n if node.value == value: \n self.head = self.head.next_node \n return node \n while node.next_node is not None:\n current = node.next_node \n if current.value == value:\n node.next_node = current.next_node \n return current \n node = current\n raise ValueError('Deleting non-existing value.')", "def delete_helper(root: TreeNode, key) -> TreeNode:\n if root is None:\n return None\n if key < root.key:\n new_root_left = delete_helper(root.left, key) # get new root of left subtree\n root.left = new_root_left # assign root.left to the new root of the left subtree\n elif key > root.key:\n new_root_right = delete_helper(root.right, key)\n root.right = new_root_right\n else: # found match, handle 3 cases\n # case 1 - match is a leaf node (return None back up the stack)\n if root.left is None and root.right is None:\n return None # root of new subtree is None\n # case 2 - match has one child (return the other back up the stack)\n elif root.left is None:\n return root.right # return the right subtree back up the stack to indicate that its the new root\n elif root.right is None: # vice-versa\n return root.left\n # case 3 - replace match with inorder successor; delete the successor; return up the stack\n else:\n inorder_successor = 
self.get_min_node(root.right)\n root.key, root.val = inorder_successor.key, inorder_successor.val # copy successor into current\n new_root_successor = delete_helper(root.right, inorder_successor.key) # delete inorder successor\n root.right = new_root_successor\n return root\n\n return root # return root of resulting tree as required", "def _del(self, handle=\"\", node=\"\", edge=\"\", subg=\"\"):\n head, tail = '', ''\n if edge:\n head, tail = edge\n\n node, head, tail, subg = map(encode_page, [node, head, tail, subg])\n\n self.changed = 1\n if head and tail:\n item = gv.findedge(gv.findnode(handle, head),\n gv.findnode(handle, tail))\n elif node:\n item = gv.findnode(handle, node)\n elif subg:\n item = gv.findsubg(handle, subg)\n elif handle:\n item = handle\n else:\n raise ValueError(\"No graph element or element type specified\")\n if item:\n gv.rm(item)", "def find_first_node(root_node, key):\n try:\n return next(find_nodes(root_node, key))\n except StopIteration:\n return None", "def remove_recursive(self, value, node=None):\n if node == None:\n node = self.head\n\n if node.value == value:\n if node.prev:\n node.prev.next = node.next\n else:\n self.head = node.next\n if node.next:\n node.next.prev = node.prev\n elif node.next:\n self.remove_recursive(value, node.next)", "def mutate_single_node(self, node, operator):\n if node.__class__ is operator[0] or (operator[1] is StatementDeletion and node.__class__ is ast.Pass):\n mutated_node = operator[1].mutate(node)\n node = mutated_node\n\n return node", "def fn(node, x):\n if not node: return False \n if not node.left and not node.right: return node.val == x\n return fn(node.left, x-node.val) or fn(node.right, x-node.val)", "def visitor(node: NodeT, left_distribute: bool) -> NodeT:\n if isinstance(node, ir.AddSub):\n items = OrderedDict() # type: Dict[ir.Node, List[Tuple[str, ir.Node]]]\n new_operators = []\n new_operands = []\n for operator, operand in zip(('+',) + getattr(node, 'operator'),\n getattr(node, 'operand')):\n if (operator == '+' and isinstance(operand, ir.MulDiv) and\n getattr(operand, 'operator') == ('*',)):\n if left_distribute:\n coeff, item = getattr(operand, 'operand')\n else:\n item, coeff = getattr(operand, 'operand')\n items.setdefault(coeff, []).append((operator, item))\n else:\n new_operators.append(operator)\n new_operands.append(operand)\n for coeff, item in items.items():\n operator, operand = zip(*item)\n assert operator[0] == '+'\n new_operators.append(operator[0])\n if len(operand) > 1:\n new_item = ir.AddSub(operator=operator[1:], operand=operand)\n else:\n new_item = operand[0]\n if left_distribute:\n children = coeff, new_item\n else:\n children = new_item, coeff\n new_operands.append(ir.MulDiv(operator=('*',), operand=children))\n if len(new_operands) > 1:\n assert new_operators[0] == '+'\n new_node = ir.AddSub(operator=tuple(new_operators[1:]),\n operand=tuple(new_operands))\n if new_node != node:\n return new_node # type: ignore\n elif new_operands and new_operands[0] != node:\n return new_operands[0]\n return node", "def delete_node(head, nodetodelete):\n\n #Nothing to do if the head is None or the nodetodelete is None\n if not head and not nodetodelete:\n print 'Nothing to delete. 
No arguments passed in'\n return\n\n #is the nodetodelete the head node, deal with that.\n node=head\n\n while(node.next):\n if(node.next == nodetodelete):\n if(node.next.next == None):\n node.next = None\n else:\n node.next = node.next.next\n\n #After finding the relevant nodetodelete, break out of the loop\n break\n node = node.next", "def _nxt_inorder(self, nxt, node, val):\n if node.left and node.right:\n gen = self.in_order_traversal()\n path = None\n while path is not val:\n path = next(gen)\n nxt = self.search(next(gen))\n return nxt", "def visit_UnaryOp(self, node):\n if node and not config.mutated:\n return self.visit_node(node)\n elif node and config.mutated and config.recovering:\n return self.recover_node(node)\n return node" ]
[ "0.59443796", "0.56053245", "0.5583689", "0.54266834", "0.52108747", "0.5164809", "0.5164809", "0.5084462", "0.50170124", "0.49877784", "0.49669993", "0.49597752", "0.48452675", "0.48225835", "0.47906753", "0.47757596", "0.47500685", "0.47187987", "0.47036573", "0.46635467", "0.46136975", "0.4606454", "0.458233", "0.45761645", "0.45578027", "0.45480385", "0.45469034", "0.45467138", "0.45370212", "0.45195153", "0.4474341", "0.4474341", "0.44508123", "0.44352588", "0.44332242", "0.44317847", "0.4420629", "0.4418036", "0.44099155", "0.43775544", "0.4377362", "0.43768913", "0.43718675", "0.437083", "0.43698373", "0.4366279", "0.43628094", "0.43596283", "0.4357512", "0.4357512", "0.43505257", "0.4347592", "0.43421996", "0.434137", "0.43276942", "0.43263236", "0.43220037", "0.4319255", "0.43118298", "0.43098867", "0.43042323", "0.42954352", "0.4288797", "0.42852378", "0.42843872", "0.4281263", "0.42700037", "0.42689556", "0.4265592", "0.42622113", "0.42574403", "0.42545697", "0.42523885", "0.4249699", "0.42362067", "0.42289594", "0.42269096", "0.4220284", "0.42194638", "0.42180905", "0.42170808", "0.42055225", "0.42053622", "0.42032146", "0.42017508", "0.41957673", "0.41933277", "0.41852054", "0.41832203", "0.41804048", "0.4176429", "0.41744968", "0.4174186", "0.4172995", "0.41672978", "0.4163972", "0.41626942", "0.41554672", "0.41541827", "0.41516817" ]
0.63893396
0
Given an iterable of children, visits each child with `visitor`, and yields the new children with any `RemovalSentinel` values removed.
def visit_iterable(
    parent: "CSTNode",
    fieldname: str,
    children: Iterable[CSTNodeT],
    visitor: "CSTVisitorT",
) -> Iterable[CSTNodeT]:
    visitor.on_visit_attribute(parent, fieldname)
    for child in children:
        new_child = child.visit(visitor)
        if isinstance(new_child, FlattenSentinel):
            yield from new_child
        elif not isinstance(new_child, RemovalSentinel):
            yield new_child
    visitor.on_leave_attribute(parent, fieldname)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def visit_body_iterable(\n parent: \"CSTNode\",\n fieldname: str,\n children: Sequence[CSTNodeT],\n visitor: \"CSTVisitorT\",\n) -> Iterable[CSTNodeT]:\n\n visitor.on_visit_attribute(parent, fieldname)\n for child in children:\n new_child = child.visit(visitor)\n\n # Don't yield a child if we removed it.\n if isinstance(new_child, RemovalSentinel):\n continue\n\n # Don't yield a child if the old child wasn't empty\n # and the new child is. This means a RemovalSentinel\n # caused a child of this node to be dropped, and it\n # is now useless.\n\n if isinstance(new_child, FlattenSentinel):\n for child_ in new_child:\n if (not child._is_removable()) and child_._is_removable():\n continue\n yield child_\n else:\n if (not child._is_removable()) and new_child._is_removable():\n continue\n # Safe to yield child in this case.\n yield new_child\n visitor.on_leave_attribute(parent, fieldname)", "def _filter_xml_iter(xml, ignored_nodes, ignored_attribs):\n for node in xml.iter():\n if node.tag.startswith('gfn_') and node.tag not in ignored_nodes:\n attribs = _pull_child_attribs(node)\n for attr, val in list(attribs.items()):\n if attr in ignored_attribs:\n del attribs[attr]\n elif val.startswith('e_id_') or attr.endswith('_obj'):\n del attribs[attr]\n elif 'address' in attr:\n del attribs[attr]\n elif attr == 'handle' and val.startswith('0x'):\n del attribs[attr]\n yield node", "def filter(self, predicate):\n self.children = [c for c in self.children if predicate(c)]\n for c in self.children:\n c.filter(predicate)", "def all_children_sorted_untagged(self):\n\n for child in self.all_children_sorted():\n if not child.tags:\n yield child", "def descendants(self,\n filter_fn: tp.Optional[tp.Callable[[NamedNode], bool]]=None) \\\n -> tp.Generator[NamedNode, None, None]:\n queue = deque()\n queue.append(self)\n\n while True:\n if not queue:\n break\n\n node = queue.popleft()\n\n if node is not self and (not filter_fn or filter_fn(node)):\n yield node\n\n for tag_view in node.children:\n queue.extend(tag_view)", "def children(self):\r\n descendants = self.q(css=self._bounded_selector(self.BODY_SELECTOR)).map(\r\n lambda el: XBlockWrapper(self.browser, el.get_attribute('data-locator'))).results\r\n\r\n # Now remove any non-direct descendants.\r\n grandkids = []\r\n for descendant in descendants:\r\n grandkids.extend(descendant.children)\r\n\r\n grand_locators = [grandkid.locator for grandkid in grandkids]\r\n return [descendant for descendant in descendants if not descendant.locator in grand_locators]", "def filtered_leaves_iterator(node, select=None, ignore=None):\n if ignore and ignore(node): # pragma: no branch\n return\n if 0 == node.children_count:\n if select is None or select(node): # pragma: no branch\n yield node\n else:\n for child in node.children:\n yield from filtered_leaves_iterator(child, select, ignore)", "def leaves(self):\n for node in self:\n if not node.outgoing:\n yield node", "def filtered_postorder_iterator(node, select=None, ignore=None):\n if ignore and ignore(node):\n return\n for child in node.children:\n yield from filtered_postorder_iterator(child, select, ignore)\n if select is None or select(node):\n yield node", "def visit_children(self, func):\n for child in self._children:\n func(child)", "def descendants(self):\n for a in self._related(set(), 'children'):\n yield a", "def iter_child_nodes(predicate, cursor):\n return (c for c in cursor.get_children() if predicate(c))", "def _remove_children_dependency(self):\n for child in self.children:\n child.parent_depencencies_left 
-= 1", "def reset_unimportant_nodes(iterable = None, threshold = None, label = None):\n\tfor graph in iterable:\n\t\tfor node, data in graph.nodes_iter(data=True):\n\t\t\tif float(data['importance']) < threshold:\n\t\t\t\tdata['label'] = label\n\t\tyield graph", "def all_children(self):\n\n for child in self.children:\n yield child\n for sub_child in child.all_children():\n yield sub_child", "def __iter__(self):\n for child in self.children:\n yield child", "def each_child(\n self,\n search_range=None,\n descended_from_type=_otio.Composable,\n shallow_search=False,\n):\n for child in self.children_if(descended_from_type, search_range, shallow_search):\n yield child", "def __iter__(self):\n\n for i in self._children:\n yield i", "def leaves(tree):\n for subtree in tree.subtrees(filter = lambda t: t.label()=='NP'):\n # The yield statement suspends function’s execution and sends a value back to the caller.\n yield subtree.leaves()", "def filterfalse(iterable, predicate):\n for x in iterable:\n if not predicate(x):\n yield x", "def exclude_nodes(self, nodes):", "def _trim_tree(state):\n for n in list(state.tree.leaf_node_gen):\n if n.type_str == TYPE_NODE_TAG:\n n.parent.child_list.remove(n)\n return _trim_tree(state)", "def roots(self):\n for node in self:\n if not node.incoming:\n yield node", "def filter(iterable, predicate):\n\n for x in iterable:\n if predicate(x):\n yield x", "def prune_tree(tree, cutoff, posteriors):\n new_tree = []\n for e in tree:\n try:\n if posteriors[e] > cutoff:\n new_tree.append(e)\n except KeyError:\n if posteriors[e[::-1]] > cutoff:\n new_tree.append(e)\n return new_tree", "def _dfs_non_recursive(self, footprints):\n visited = {}\n for v in footprints:\n visited[v] = False\n cluster = []\n end_of_scan = footprints[0]\n for v in footprints:\n if not any(x != True for x in visited.values()) and cluster:\n cluster.append(end_of_scan)\n self.resorts.append(cluster)\n break\n if not visited[v]:\n yield v\n visited[v] = True\n stack = [(v, iter(self.neighbours[v]))]\n if v != end_of_scan and cluster:\n cluster.append(end_of_scan)\n self.resorts.append(cluster)\n end_of_scan = 0\n cluster = []\n while stack:\n _, neighbourlist = stack[-1]\n try:\n neighbour = next(neighbourlist)\n if not visited[neighbour]:\n yield neighbour\n visited[neighbour] = True\n stack.append((neighbour, iter(self.neighbours[neighbour])))\n cluster.append(neighbour)\n except StopIteration:\n end_of_scan = v\n stack.pop()", "def prune( self ):\n if self.children is None:\n return\n \n # recursively prune from bottom up\n for space in self.children:\n space.prune()\n\n # if all child nodes are empty remove them all\n for space in self.children:\n if not space.is_empty():\n return\n\n self.children = None", "def find_children(self):\r\n for i in range(len(self.vertices)):\r\n self.vertices[i].children = []\r\n for i in range(len(self.vertices)):\r\n for parent in self.vertices[i].parents:\r\n if i not in self.vertices[parent].children:\r\n self.vertices[parent].children.append(i)", "def leaves(tree):\n for subtree in tree.subtrees(filter = lambda t: t.node=='NP'):\n yield subtree.leaves()", "def extend_to_children(self, indices):\n def get_children(i):\n model = i.model()\n rows = model.rowCount(parent=i)\n for row in range(rows):\n child = model.index(row, 0, parent=i)\n yield child\n\n subitems = set()\n for i in indices:\n valid_parent = i.parent().isValid()\n if valid_parent and i not in subitems:\n subitems.add(i)\n\n if self._hierarchy_view:\n # Assume this is a group node\n for child in 
get_children(i):\n subitems.add(child)\n else:\n # is top level node\n for child in get_children(i):\n subitems.add(child)\n\n return list(subitems)", "def __next__(self):\n for child in self.children:\n yield child", "def child_ents(self) -> Iterator['Entity']:\n for ent in self.vmf.entities:\n if self.id in ent.visgroup_ids:\n yield ent", "def remove_nodes(self, nodes):\n for node in nodes:\n for arc in node.entries:\n arc.src.exits.remove(arc)\n self.arcs.remove(arc)\n for arc in node.exits:\n arc.dest.entries.remove(arc)\n self.arcs.remove(arc)\n self.nodes.remove(node)\n dangling_nodes = []\n for node in self.nodes:\n if node == self.start or node == self.end:\n pass\n else:\n if not node.exits or not node.entries:\n dangling_nodes.append(node)\n if dangling_nodes:\n self.remove_nodes(dangling_nodes)", "def reap_children(children, config, logger):\n to_delete = []\n current_time = time.time()\n for eventid, info in children.items():\n returncode = info['popen'].poll()\n if returncode is not None:\n logger.info('Reaped child for event %s (return code %d)' %\n (eventid, returncode))\n to_delete.append(eventid)\n continue\n #\n # Kill children who take too long\n #\n if info['start_time'] + config['max_process_time'] < current_time:\n logger.warning('Event %s taking too long, killing' % eventid)\n info['popen'].kill()\n info['popen'].wait()\n logger.warning('Reaped child for killed event %s' % eventid)\n to_delete.append(eventid)\n\n for eventid in to_delete:\n del children[eventid]\n\n return", "def children(self,parent):\n node = self._validate(parent)\n children_pos_list = [self._make_position(child) for child in node._children]\n for child in children_pos_list:\n yield child", "def leaves(tree):\n\tfor subtree in tree.subtrees(filter = lambda t: t.label()=='NP'):\n\t\tyield subtree.leaves()", "def prune_influence_map(self):\n im = self.get_im()\n\n # First, remove all self-loops\n logger.info('Removing self loops')\n edges_to_remove = []\n for e in im.edges():\n if e[0] == e[1]:\n logger.info('Removing self loop: %s', e)\n edges_to_remove.append((e[0], e[1]))\n # Now remove all the edges to be removed with a single call\n im.remove_edges_from(edges_to_remove)\n\n # Remove parameter nodes from influence map\n remove_im_params(self.model, im)\n\n # Now compare nodes pairwise and look for overlap between child nodes\n logger.info('Get successors of each node')\n succ_dict = {}\n for node in im.nodes():\n succ_dict[node] = set(im.successors(node))\n # Sort and then group nodes by number of successors\n logger.info('Compare combinations of successors')\n group_key_fun = lambda x: len(succ_dict[x])\n nodes_sorted = sorted(im.nodes(), key=group_key_fun)\n groups = itertools.groupby(nodes_sorted, key=group_key_fun)\n # Now iterate over each group and then construct combinations\n # within the group to check for shared sucessors\n edges_to_remove = []\n for gix, group in groups:\n combos = itertools.combinations(group, 2)\n for ix, (p1, p2) in enumerate(combos):\n # Children are identical except for mutual relationship\n if succ_dict[p1].difference(succ_dict[p2]) == set([p2]) and \\\n succ_dict[p2].difference(succ_dict[p1]) == set([p1]):\n for u, v in ((p1, p2), (p2, p1)):\n edges_to_remove.append((u, v))\n logger.debug('Will remove edge (%s, %s)', u, v)\n logger.info('Removing %d edges from influence map' %\n len(edges_to_remove))\n # Now remove all the edges to be removed with a single call\n im.remove_edges_from(edges_to_remove)", "def leaves(tree):\n for subtree in tree.subtrees(filter = 
lambda t: t.label()=='NP'):\n yield subtree.leaves()", "def leafs(self):\n for node in self.graph:\n if isinstance(node, Molecule) and not self.graph[node]:\n yield node", "def _process_children(self, node):\n for kid in node.children:\n self._process_node(kid)", "def v6_lstrip(iterable, strip_value):\n iterator = iter(iterable)\n for item in iterator:\n if item != strip_value:\n yield item\n break\n yield from iterator", "def get_child_nibbles(self):\n tg = self.get_child_tokens()\n cg = self.get_child_cursors()\n t = next(tg, None)\n c = next(cg, None)\n while t is not None or c is not None:\n if c is None:\n yield t\n t = next(tg, None)\n elif t is None:\n yield c\n c = next(cg, None)\n elif t < c: # orphan token\n yield t # orphan\n t = next(tg, None)\n elif t <= c.end: # token internal to child cursor\n t = next(tg, None) # consume without emitting\n else: # child cursor after internal tokens exhausted\n yield c\n c = next(cg, None)\n # Discard tokens that are past the end of our stated range\n if t is not None and t > self.end:\n t = None", "def create_children(input_node, node_holder):\n # Get all ring-ring splits\n ring_ring_splits = get_ring_ring_splits(input_node.RDMol)\n if ring_ring_splits:\n for ring_ring_split in ring_ring_splits:\n add_child_and_edge(\n ring_ring_split, input_node, \"[Xe]\", node_holder, ring_ring=True\n )\n fragments = get_fragments(input_node.RDMol)\n if len(fragments) < 2:\n return\n # Now remove one item on each iteration\n for i in range(len(fragments)):\n new_list = []\n for j, item in enumerate(fragments):\n if i == j:\n excluded_smi = item\n continue\n new_list.append(item)\n add_child_and_edge(new_list, input_node, excluded_smi, node_holder)", "def remove_nodes_from(self, nodes):\n for node in nodes:\n self.remove_node(node)", "def walk(self):\n yield self\n for child in self.children:\n for descendant in child.walk():\n yield descendant", "def prune(self, rng, get_nodes, max_depth=1):\n if not self.children:\n return\n for i_c, child in enumerate(self.children):\n if child.min_depth >= max_depth:\n self.children[i_c] = Node(\n rng.choice(get_nodes(arity=0)),\n self.tree_type)\n self.children[i_c].parent = self\n elif max_depth > 1:\n child.prune(rng, get_nodes, max_depth - 1)", "def leaves(tree):\n\t\tfor subtree in tree.subtrees(filter = lambda t: t.label()=='NP'):\n\t\t# for subtree in tree.subtrees():\n\t\t\tyield subtree.leaves()", "def _sync_children(self, source_parent, destination_parent, new_child):\r\n destination_reordered = []\r\n destination_children = destination_parent['fields']['children']\r\n source_children = source_parent['fields']['children']\r\n orphans = set()\r\n for child in destination_children:\r\n try:\r\n source_children.index(child)\r\n except ValueError:\r\n orphans.add(child)\r\n for child in source_children:\r\n if child == new_child or child in destination_children:\r\n destination_reordered.append(child)\r\n destination_parent['fields']['children'] = destination_reordered\r\n return orphans", "def _sync_children(self, source_parent, destination_parent, new_child):\n destination_reordered = []\n destination_children = set(destination_parent.fields['children'])\n source_children = source_parent.fields['children']\n orphans = destination_children - set(source_children)\n for child in source_children:\n if child == new_child or child in destination_children:\n destination_reordered.append(child)\n destination_parent.fields['children'] = destination_reordered\n return orphans", "def reapChildren():\n\n for col in 
allLivingCollectors():\n now = int(time.time())\n status = col.proc.poll()\n if status is None:\n # The process hasn't terminated yet\n continue\n col.proc = None\n\n # behavior based on status. a code 0 is normal termination, code 13\n # is used to indicate that we don't want to restart this collector.\n # any other status code is an error and is logged.\n if status == 13:\n LOG.info('removing %s from the list of collectors (by request)',\n col.name)\n col.dead = True\n elif status != 0:\n LOG.warning('collector %s terminated after %d seconds with '\n 'status code %d, marking dead', col.name,\n now - col.lastspawn, status)\n col.dead = True\n else:\n LOG.debug('Reap collector : %s', col.name)\n registerCollector(Collector(col.name, col.interval, col.filename,\n col.mtime, col.lastspawn))", "def pruneSelfModifyingRelationships(markup):\n markupNew = markup.copy()\n modifiers = markup.getConTextModeNodes(\"modifier\")\n nodesToRemove = []\n for m in modifiers:\n modifiedBy = markup.successors(m)\n if( modifiedBy ):\n for mb in modifiedBy:\n if( TO.encompasses(mb,m) ):\n nodesToRemove.append(m)\n markupNew.remove_nodes_from(nodesToRemove)\n return markupNew", "def cull_tree(nodes_to_keep):\n [n.attrib.update({'current': 'true'}) for n in nodes_to_keep]\n all_parents = set()\n [all_parents.update(list(x.iterancestors()) + [x]) for x in nodes_to_keep]\n\n def test_inclusion(node, current):\n inclusion = node == current or node.tag in ['label', 'heading', 'cover', 'text']\n if not inclusion and node.tag == 'crosshead':\n # is the crosshead the first previous one?\n try:\n inclusion = node == current.itersiblings(tag='crosshead', preceding=True).next()\n except StopIteration:\n pass\n return inclusion or node in all_parents\n\n def fix_parents(node):\n while node.getparent() is not None:\n parent = node.getparent()\n to_remove = filter(lambda x: not test_inclusion(x, node), parent.getchildren())\n [parent.remove(x) for x in to_remove]\n node = parent\n [fix_parents(n) for n in nodes_to_keep]\n return nodes_to_keep[0].getroottree()", "def filter(iterable, filter_func):\n for item in iterable:\n item = filter_func(item)\n if item is not None:\n yield item", "def descendants(self):\r\n\r\n descendants = BuildFile.scan_buildfiles(self.root_dir, self.parent_path)\r\n for sibling in self.family():\r\n descendants.discard(sibling)\r\n return descendants", "def internal_clean_children(self, course_locator):\r\n original_structure = self._lookup_course(course_locator)['structure']\r\n for block in original_structure['blocks'].itervalues():\r\n if 'fields' in block and 'children' in block['fields']:\r\n block['fields'][\"children\"] = [\r\n block_id for block_id in block['fields'][\"children\"]\r\n if LocMapperStore.encode_key_for_mongo(block_id) in original_structure['blocks']\r\n ]\r\n self.db_connection.update_structure(original_structure)\r\n # clear cache again b/c inheritance may be wrong over orphans\r\n self._clear_cache(original_structure['_id'])", "def visit_expr(self, node, visited_children):\n output = []\n for child in visited_children[0]:\n output.append(child)\n return output", "def children(self) -> Iterable[Heirarchical]:\n return []", "def v7_lstrip(iterable, strip_value):\n iterator = iter(iterable)\n for item in iterator:\n if (callable(strip_value) and not strip_value(item)\n or not callable(strip_value) and item != strip_value):\n yield item\n break\n yield from iterator", "def decreasing_children(self, v):\n children = []\n root = None\n for i in range(v + 1, self.size() + 1):\n if 
not self.le(i, v):\n break\n if root is None or not self.le(i, root):\n children.append(i)\n root = i\n return children", "def __ngrams_with_all_eles(self, targets, partial_ngram):\n #An n-gram is constructed element by element and passed on to each of the child nodes.\n #When every target element has been added to the constructed n-gram, it will yield the complete n-gram which was passed to it by its parents.\n \n #If all targets were added to the constructed n-gram and this is a terminating node then yield the n-gram constructed so far.\n if len(targets) == 0 and self.end_of_ngram:\n yield partial_ngram\n\n #For each next element, construct the new partial n-gram and pass it to that element's child node, yielding every n-gram it yields.\n for ele in self.children:\n new_targets = targets - { ele }\n new_ngram = partial_ngram+(ele,)\n for ngram in self.children[ele].__ngrams_with_all_eles(new_targets, new_ngram):\n yield ngram", "def test_remove_node_from_children(self):\n\n this_node_table = copy.deepcopy(NODE_TABLE_WITH_CHILDREN)\n this_node_table = skeleton_lines._remove_node_from_children(\n node_table=this_node_table, target_node_index=0)\n\n self.assertTrue(_compare_tables(\n NODE_TABLE_NO_ZERO_CHILDREN, this_node_table))", "def __iter__(self):\n return iter(self._children)", "def __iter__(self):\n return iter(self.__children)", "def iterchildren(self, tag: Optional[str] = None) -> Iterator[E]:\n if tag == '*':\n tag = None\n for child in self:\n if tag is None or child.is_matching(tag):\n yield child", "def remove_undesired_children(parent_group_list, children_uid_list, children_label, verbose=False):\n new_parent_group = list()\n for parent_group in parent_group_list:\n new_parent = parent_group.copy()\n current_children_uids = json_extract_nested_ids(parent_group, children_label)\n # Get elements in current group which are not part of the children to use\n diff = list(set(current_children_uids).difference(children_uid_list))\n if len(diff) > 0: # There are elements which should not be there\n if verbose == True:\n logger.warning(parent_group['name'] + ' (' + parent_group['id'] +\n ') contains elements which do NOT belong to the package :' + str(diff))\n logger.warning('Elements will be removed from the group')\n # Get the required elements\n children_to_keep = list(set(current_children_uids).difference(diff))\n new_parent[children_label] = list()\n for uid in children_to_keep:\n new_parent[children_label].append({\"id\": uid})\n new_parent_group.append(new_parent)\n\n return new_parent_group", "def cleave_supervoxels_as_isolated_bodies(instance_info, sv_ids):\n logger.info(\"Fetching mapping for each SV\")\n body_ids = fetch_mapping(*instance_info, sv_ids, as_series=True)\n\n logger.info(\"Performing cleaves\")\n cleaved_ids = []\n for sv_id, body_id in tqdm(list(zip(sv_ids, body_ids))):\n try:\n cleaved_body = post_cleave(*instance_info, body_id, [sv_id])\n except requests.RequestException as ex:\n if 'cannot cleave all supervoxels from the label' in ex.response.content.decode():\n # Body has only one supervoxel to begin with\n cleaved_body = body_id\n else:\n sys.stderr.write(ex.response.content.decode())\n raise\n\n cleaved_ids.append( cleaved_body )\n \n return list(zip(sv_ids, body_ids, cleaved_ids))", "def filterGChildrenResponse(children):\n filteredChildren = []\n\n for child in children:\n\n filteredChildObject = {}\n\n filteredChildObject['id'] = child['id']\n filteredChildObject['name'] = child['name']\n\n if google.is_folder(child):\n filteredChildObject['children'] = 
[]\n filteredChildObject['is_folder'] = True\n else:\n filteredChildObject['is_folder'] = False\n\n filteredChildren.append(filteredChildObject)\n\n return filteredChildren", "def all_children_sorted_by_tags(self, include_tags, exclude_tags):\n\n for child in self.all_children_sorted():\n if child.line_inclusion_test(include_tags, exclude_tags):\n yield child", "def remove_and_preserve_children(self):\r\n \r\n if self.parent and hasattr(self.parent, 'add_child)'):\r\n for child in self._children:\r\n #Correct offsets\r\n child.x += self.x\r\n child.y += self.y\r\n self.parent.add_child(child)\r\n self._children = []\r\n self.remove()", "def pruneSelfModifyingRelationships(self):\n modifiers = self.getConTextModeNodes(\"modifier\")\n nodes_to_remove = []\n for modifier in modifiers:\n modified_by = self.successors(modifier)\n if modified_by:\n for mod_by in modified_by:\n if self.getVerbose():\n print(mod_by, modifier, mod_by.encompasses(modifier))\n if mod_by.encompasses(modifier):\n nodes_to_remove.append(modifier)\n if self.getVerbose():\n print(\"removing the following self modifying nodes\", nodes_to_remove)\n self.remove_nodes_from(nodes_to_remove)", "def remove_nodes(self, nodes):\r\n new_leaves = set()\r\n for node in nodes:\r\n if node not in self.nodes:\r\n raise Exception(\"Attempting to remove invalid node: %s\" % node.data.id)\r\n for parent_node in node.parents:\r\n if parent_node in nodes:\r\n continue\r\n parent_node.children.remove(node)\r\n if not parent_node.children:\r\n new_leaves.add(parent_node)\r\n\r\n # Do these outside in case 'nodes' is in fact self.leaves, so that we don't change the set we're iterating over.\r\n self.leaves -= nodes\r\n self.leaves.update(new_leaves)\r\n return new_leaves", "def postorder_iterator(node):\n for child in node.children:\n yield from postorder_iterator(child)\n yield node", "def iter_leaf(self):\n for elem in self[1:]:\n if isinstance(elem, Tree):\n for elem2 in elem.iter_leaf:\n yield elem2\n else:\n yield elem", "def remove(self, children=None):\n if children is not None and self.children:\n logging.warning('%s is being removed by remove(children=%s), '\n ' but it has (unexpected) children', self, children)\n self.bundle.trees = [root for root in self.bundle.trees if root != self]", "def instrs(self):\n x = self._firstInstr\n while x is not None:\n # now we can remove x and continue iterating :)\n x_next = x.next\n yield x\n x = x_next", "def GetChildren( self ):\n children = [\n cWrpr \n for cWrpr in GameNodePath.GetChildren( self ) \n if not cWrpr.data.getPythonTag( TAG_IGNORE )\n ]\n return children", "def children(g, parent):\n res = set()\n\n if isinstance(parent, Production):\n prods = [parent]\n else:\n prods = g.productions(parent)\n\n for prod in prods:\n for item in prod.rhs():\n if is_nonterminal(item):\n res.add(item)\n\n return res", "def test_remove_all_values2(delete_tree):\n delete_tree.remove(\"ted\")\n delete_tree.remove(\"teabagged\")\n delete_tree.remove(\"tea\")\n delete_tree.remove(\"teabag\")\n delete_tree.remove(\"teabagger\")\n delete_tree.remove(\"teabags\")\n delete_tree.remove(\"teabaggers\")", "def v8_lstrip(iterable, strip_value):\n iterator = iter(iterable)\n if callable(strip_value):\n predicate = strip_value\n else:\n def predicate(value): return value == strip_value\n for item in iterator:\n if not predicate(item):\n yield item\n break\n yield from iterator", "def v6_lstrip(iterable, strip_value):\n iterator = iter(iterable)\n for item in iterator:\n if item != strip_value:\n yield item\n break\n 
for item in iterator:\n yield item", "def recipe12_6():\n from xml import dom\n def remove_whitespace_nodes(node):\n \"\"\" Removes all of the whitespace-only text descendants of a DOM node. \"\"\"\n # prepare the list of text nodes to remove (and recurse when needed)\n remove_list=[]\n for child in node.childNodes:\n if child.nodeType==dom.Node.TEXT_NODE and not child.data.strip():\n # add this text node to the to-be-removed list\n remove_list.append(chid)\n elif child.hasChildNodes():\n # recurse, it's the simplest way to deal with the subtree\n remove_whitespace_nodes(child)\n # perform the removals\n for node in remove_list:\n node.parentNode.removeChild(node)\n node.unlink()", "def children(self): # noqa: ANN201", "def _get_children(self):\n return set()", "def _children(self):\n for codeobj in self.body:\n if isinstance(codeobj, CodeEntity):\n yield codeobj", "def get_children(cls, node: ast.AST) -> Iterable[ast.AST]:\n body: Optional[Sequence[ast.AST]] = getattr(node, 'body', None)\n if body is not None:\n for child in body:\n yield child", "def flat(self):\n to_clean = []\n while self.nodes:\n head, children = self.nodes.popitem(0)\n to_clean.extend([x for x in self._get_leafs(head, children)])\n return to_clean", "def descendants(self):\n yield self\n for child in self.children:\n if isinstance(child, ParentBox):\n for grand_child in child.descendants():\n yield grand_child\n else:\n yield child", "def removeChildren(self):\n return _libsbml.XMLNode_removeChildren(self)", "def filtered_preorder_iterator(node, select=None, ignore=None):\n if ignore and ignore(node):\n return\n if select is None or select(node):\n yield node\n for child in node.children:\n yield from filtered_preorder_iterator(child, select, ignore)", "def _iter_legend_children(children):\n for obj in children:\n if hasattr(obj, '_children'):\n yield from _iter_legend_children(obj._children)\n else:\n yield obj", "def _remove_dangling_downstream_nodes(\n node: p_pb2.PipelineNode,\n node_ids_to_keep: Collection[str]) -> p_pb2.PipelineNode:\n # Using a loop instead of set intersection to ensure the same order.\n downstream_nodes_to_keep = [\n downstream_node for downstream_node in node.downstream_nodes\n if downstream_node in node_ids_to_keep\n ]\n if len(downstream_nodes_to_keep) == len(node.downstream_nodes):\n return node\n result = p_pb2.PipelineNode()\n result.CopyFrom(node)\n result.downstream_nodes[:] = downstream_nodes_to_keep\n return result", "def _recurse_children(self, offset):\n while offset < self.obj_offset + self.Length:\n item = obj.Object(\"VerStruct\", offset = offset, vm = self.obj_vm, parent = self)\n if item.Length < 1 or item.get_key() == None:\n raise StopIteration(\"Could not recover a key for a child at offset {0}\".format(item.obj_offset))\n yield item.get_key(), item.get_children()\n offset = self.offset_pad(offset + item.Length)\n raise StopIteration(\"No children\")", "def compact(seq):\n for item in seq:\n if item:\n yield item", "def leaves_iterator(node):\n if 0 == node.children_count:\n yield node\n else:\n for child in node.children:\n yield from leaves_iterator(child)", "def unisolvent_nodes(self):\r\n pass", "def __iter__(self):\n for tree in self._tree.subTrees():\n yield self.__class__(tree)", "def remove_child_nodes(self, id):\r\n children = self.get_node_by_id(id).children\r\n self.nodes = [ n for n in self.nodes if n.id!=id ]\r\n if len(children)>0:\r\n for c in children:\r\n self.remove_child_nodes(c.id)", "def leaf_nodes(self):\n deps = set([\n item for sublist in 
self.edges.values() for item in sublist\n ]) # Now contains all nodes that contain dependencies.\n return (x for x in self.nodes if x not in deps) # Generator that\n # contains all nodes *without* any dependencies (leaf nodes)", "def _leaves(tree):\n for st in tree.subtrees(filter=lambda t: t.label() in ['NP', 'JJ', 'RB']):\n yield st.leaves()", "def descendants(self):\n for child in self.children:\n yield child\n if isinstance(child, LoggedAction):\n for descendant in child.descendants():\n yield descendant" ]
[ "0.742417", "0.596061", "0.59555763", "0.5783024", "0.5576109", "0.5383288", "0.5338851", "0.5332447", "0.5312418", "0.5300283", "0.52990466", "0.52795494", "0.5277477", "0.5260708", "0.5232784", "0.5212718", "0.5207777", "0.52073103", "0.5153014", "0.5149678", "0.5128579", "0.5125742", "0.508858", "0.50870097", "0.5082399", "0.5055036", "0.5052121", "0.5044605", "0.5037829", "0.5037775", "0.5033895", "0.50328445", "0.50321615", "0.5013264", "0.5010879", "0.5010718", "0.50076705", "0.50042015", "0.49900097", "0.49795538", "0.49646062", "0.4963017", "0.49586973", "0.4955997", "0.49486795", "0.49468502", "0.494428", "0.4939553", "0.49350983", "0.4930943", "0.49174225", "0.4914232", "0.49115515", "0.49115407", "0.49091944", "0.4905535", "0.48961365", "0.4892619", "0.4889201", "0.48886317", "0.48875105", "0.48864225", "0.48850045", "0.48777512", "0.48772135", "0.4873806", "0.48725924", "0.4872242", "0.486948", "0.48655018", "0.48650584", "0.48634505", "0.4859847", "0.48459828", "0.48397264", "0.4837372", "0.48342845", "0.48336977", "0.48306495", "0.48274213", "0.48199534", "0.4818448", "0.4815246", "0.48146012", "0.48110572", "0.4809844", "0.48079443", "0.48037487", "0.48034492", "0.48008722", "0.47989607", "0.47981673", "0.47973594", "0.4793164", "0.47847995", "0.47847387", "0.47834527", "0.47809985", "0.47736558", "0.4772807" ]
0.71860904
1
A convenience wrapper for `visit_iterable` that returns a sequence instead of an iterable.
def visit_sequence(
    parent: "CSTNode",
    fieldname: str,
    children: Sequence[CSTNodeT],
    visitor: "CSTVisitorT",
) -> Sequence[CSTNodeT]:
    return tuple(visit_iterable(parent, fieldname, children, visitor))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def toiter(x):\n if iterable(x):\n return x\n else:\n return [x]", "def concrete(seq):\n if isinstance(seq, Iterator):\n seq = list(seq)\n if isinstance(seq, (tuple, list)):\n seq = list(map(concrete, seq))\n return seq", "def make_iterable(arg):\n return arg if is_iterable(arg) else (arg,)", "def simple_seq(seq):\n for i in seq:\n yield i", "def generator_wrapper(iterable):\n\n num_items = len(iterable)\n for idx in range(num_items):\n yield iterable[idx]", "def actually_flatten(iterable):\n remainder = iter(iterable)\n while True:\n first = next(remainder) # pylint: disable=R1708\n # Python 2/3 compat\n is_iter = isinstance(first, collections.Iterable)\n try:\n basestring\n except NameError:\n basestring = str # pylint: disable=W0622\n\n if is_py3() and is_iter and not_a_string(first):\n remainder = IT.chain(first, remainder)\n elif (not is_py3()) and is_iter and not isinstance(first, basestring):\n remainder = IT.chain(first, remainder)\n else:\n yield polite_string(first)", "def gather(iterable: Iterable[Maybe[A]]) -> Maybe[Iterable[A]]:\n return cast(Maybe[Iterable[A]], sequence_(Just, iterable))", "def ensure_list(iterable: Iterable[A]) -> List[A]:\n if isinstance(iterable, list):\n return iterable\n else:\n return list(iterable)", "def ensure_list(iterable: Iterable[A]) -> List[A]:\n if isinstance(iterable, list):\n return iterable\n else:\n return list(iterable)", "def get_iterable(obj):\n if obj is None:\n return ()\n\n if (isinstance(obj, collections.Iterable) and\n not isinstance(obj, six.string_types)):\n return obj\n\n return (obj,)", "def caboose(seq: Iterable[TItem], el: TElement) -> Iterable[Union[TElement, TItem]]:\n yield from seq\n yield el", "def flatten_iterator(x: Iterable[_T]) -> Iterator[_T]:\n elem: _T\n for elem in x:\n if not isinstance(elem, str) and hasattr(elem, \"__iter__\"):\n yield from flatten_iterator(elem)\n else:\n yield elem", "def _ensure_iterable(x):\n if isinstance(x[0], Iterable):\n if len(x) > 1:\n raise TypeError(\"Either Iterable or variable argument list expected\")\n return x[0]\n else:\n return x", "def flatten(l):\n for el in l:\n if isinstance(el, collections.Iterable) and not isinstance(el, (str, bytes)):\n yield from flatten(el)\n else:\n yield el", "def flatten(iterable):\n return [x for x in actually_flatten(iterable)]", "def flatten(iterable):\n for element_iterable in iterable:\n for element in element_iterable:\n yield element", "def flatten_as_list(iterable):\n return list(chain(*iterable))", "def PeekIterable(iterable):\n try:\n head_element = iterable.next()\n new_iterable = itertools.chain([head_element], iterable)\n return head_element, new_iterable\n except StopIteration:\n return None, iterable", "def safe_iterator(i):\n return i or []", "def __iter__(self):\n return iter(self.to_list())", "def builtin_iterable(func):\n if sys.version_info[:1] < (3,):\n @wraps(func)\n def inner(*args, **kwargs):\n return list(func(*args, **kwargs))\n return inner\n return func", "def test_iteriter_op_1():\n\n @ops.iteriter_op\n def f(x):\n return iter([4, 5, 6])\n\n result = f(iter([1, 2, 3])) # Passing in an iterator, as expected\n\n assert(isinstance(result, collections.abc.Iterator)), f\"{result}\"\n assert(list(result) == [4, 5, 6])", "def makeiter(obj):\n if not obj:\n return []\n if not isiterable(obj):\n return [obj]\n return obj", "def fromiter(iterable, dtype, count=-1):\n\n return call_origin(numpy.fromiter, iterable, dtype, count)", "def test_listiter_op_1():\n\n @ops.listiter_op\n def f(x):\n return iter([4, 5, 6])\n\n result = 
f([1, 2, 3]) # Passing in a list, as expected\n\n assert(isinstance(result, collections.abc.Iterator)), f\"{result}\"\n assert(list(result) == [4, 5, 6])", "def flatten(iterable):\n return it.chain.from_iterable(iterable)", "def from_sequence(self, seq):\n return Iter(self._from_sequence(seq))", "def to_iterable(\n stage: Stage = pypeln_utils.UNDEFINED, maxsize: int = 0\n) -> typing.Iterable:\n\n if pypeln_utils.is_undefined(stage):\n return pypeln_utils.Partial(lambda stage: to_iterable(stage, maxsize=maxsize))\n\n if isinstance(stage, Stage):\n iterable = stage.to_iterable(maxsize=maxsize)\n else:\n iterable = stage\n\n return iterable", "def flow_to_iter(flow):\n if ((sys.version_info.major == 3 and hasattr(flow, \"__next__\"))\n or (sys.version_info.major == 2 and hasattr(flow, \"next\"))):\n return flow\n else:\n return iter(flow)", "def test_iterlist_op_1():\n\n @ops.iterlist_op\n def f(x):\n return [4, 5, 6]\n\n result = f(iter([1, 2, 3])) # Passing in an iterator, as expected\n\n assert(isinstance(result, list)), f\"{result}\"\n assert(result == [4, 5, 6])", "def test_shortcut_nested_with_iterable_subclass():\n\n class MyEvent:\n def MyTracks(self) -> _itsb_FADLStream[_itsb_MyTrack]:\n ...\n\n s = ast_lambda(\n \"ds.Select(lambda e: e.MyTracks()).Select(lambda ts: ts.Select(lambda t: t.pt()))\"\n )\n objs = ObjectStream[Iterable[MyEvent]](ast.Name(id=\"ds\", ctx=ast.Load()))\n\n new_objs, new_s, expr_type = remap_by_types(objs, \"ds\", Iterable[MyEvent], s)\n\n assert ast.dump(new_s) == ast.dump(\n ast_lambda(\n \"ds.Select(lambda e: e.MyTracks()).Select(lambda ts: ts.Select(lambda t: t.pt()))\"\n )\n )\n # assert ast.dump(new_objs.query_ast) == ast.dump(\n # ast_lambda(\"MetaData(e, {'t': 'track stuff'})\")\n # )\n assert expr_type == Iterable[Iterable[float]]", "def flatten(sequence):\n\n if not isinstance(sequence, (list, tuple)):\n raise TypeError('Support only an instance of list or tuple')\n\n for i in sequence:\n if isinstance(i, (list, tuple)):\n yield from flatten(i)\n else:\n yield i", "def as_list(gen):\n return list(gen())", "def flatten(l):\n for el in l:\n if isinstance(el, Iterable) and not isinstance(el, (str, bytes)):\n for sub in flatten(el):\n yield sub\n else:\n yield el", "def to_iterable(stage=pypeln_utils.UNDEFINED, maxsize=0):\n\n if utils.is_undefined(stage):\n return utils.Partial(lambda stage: _to_iterable(stage, maxsize))\n else:\n return _to_iterable(stage, maxsize)", "def slice(iterable, *args):\n return iter(it.islice(iterable, *args))", "def mkiter(item):\n # FIXME: don't really need to construct a list\n if item is None:\n return iter(())\n elif isIterable(item):\n return iter(item)\n else:\n return iter([item])", "def flatmap(iterable, function_to_list):\n for element in iterable:\n list_block = function_to_list(element)\n for result_value in list_block:\n yield result_value", "def flatmap(func, *iterable) -> Iterator:\n return map(func, chain(*chain(*iterable)))", "def listify(x):\n if (not isinstance(x, basestring)) and isinstance(x, Sequence):\n return x\n else:\n return [x]", "def deep_flatten(iterable):\n pass", "def pick(iterable):\n for element in iterable:\n yield element\n while True:\n yield element", "def to_list(x):\n import collections\n if not isinstance(x, collections.Iterable) or isinstance(x, str):\n x = [x]\n return x", "def get_consistent_generator(iterable):\n try:\n first = next(iterable)\n except StopIteration:\n return None\n\n if first is None:\n return None\n\n return itertools.chain([first], iterable)", "def accept(self, 
visitor: Any) -> Any:\n visitor.visit_entire_sequence(self)", "def peek(iterable, size=1):\r\n objs = []\r\n for _ in range(size):\r\n try:\r\n obj = next(iterable)\r\n except StopIteration:\r\n break\r\n objs.append(obj)\r\n return objs, itertools.chain(objs, iterable)", "def iterable(arg):\n return isinstance(arg, collections.Iterable) and not isinstance(arg, six.string_types)", "def flatmap2(func, *iterable) -> Iterator:\n return map(func, chain(*chain(*chain(*iterable))))", "def _assert_valid_value_and_cast(self, value):\n if not hasattr(value, '__iter__'):\n raise AssertionError(f\"{value} is not a valid iterable type\")\n return value", "def scan(func, iterable, start=_EMPTY, *, echo_start=True):\n it = iter(iterable)\n if start is _EMPTY:\n start = next(it)\n if echo_start:\n yield start\n for item in it:\n start = func(start, item)\n yield start", "def uniform_iterator(sequence):\n\n if isinstance(sequence, abc.Mapping):\n return six.iteritems(sequence)\n else:\n return enumerate(sequence)", "def from_iterable(iterable: Iterable) -> ObservableBase:\n from ..operators.observable.fromiterable import from_iterable\n return from_iterable(iterable)", "def test_collect(\n self, iterable: t.Iterable[Result[int, str]], exp: Result[int, str]\n ) -> None:\n assert Result.collect(iterable) == exp", "def to_seq (value):\n if not value:\n return []\n\n if isinstance (value, str):\n return [value]\n\n else:\n return value", "def flatten(iterable):\n return chain(*iterable)", "def from_iterable(self, iterable):\n raise NotImplementedError()", "def flatten(c):\n for x in c:\n if isinstance(x, str) or not isinstance(x, Iterable):\n yield x\n else:\n yield from flatten(x)", "def __call__(self, iterable):\n if self._ordered:\n imap = self._distrubtor.imap\n else:\n imap = self._distrubtor.imap_unordered\n\n for result in imap(iterable):\n yield result", "def batch_iter(iterable: Iterable[T], size: int) -> Iterator[Tuple[T]]:\n # make sure we can deal with iterables like lists too\n sourceiter = iter(iterable)\n # call islice until it returns an empty tuple\n return iter(lambda: tuple(islice(sourceiter, size)), ())", "def __iter__(self):\n for x in self.seq: yield x", "def flatten_stream(chunk_stream: collections.Iterable) -> collections.Iterable:\r\n return chain.from_iterable(chunk_stream)", "def flatten(items):\n for x in items:\n if isinstance(x, Iterable) and not isinstance(x, (str, bytes)):\n for sub_x in flatten(x):\n yield sub_x\n else:\n yield x", "def visit_iterable(\n parent: \"CSTNode\",\n fieldname: str,\n children: Iterable[CSTNodeT],\n visitor: \"CSTVisitorT\",\n) -> Iterable[CSTNodeT]:\n visitor.on_visit_attribute(parent, fieldname)\n for child in children:\n new_child = child.visit(visitor)\n if isinstance(new_child, FlattenSentinel):\n yield from new_child\n elif not isinstance(new_child, RemovalSentinel):\n yield new_child\n visitor.on_leave_attribute(parent, fieldname)", "def iter_pairs(iterable):\n if isinstance(iterable, Mapping):\n iterable = iterable.items()\n return iter(iterable)", "def flatten(seq):\n \n ret = []\n def _flatten(seq):\n for i in seq:\n if isinstance(i, (list, tuple)):\n _flatten(i)\n else:\n ret.append(i)\n return ret\n \n if isinstance(seq, tuple):\n return tuple(_flatten(seq))\n \n return _flatten(seq)", "def parse(x):\n if isinstance(x, container_abcs.Iterable):\n return x\n return tuple(repeat(x, n))", "def take(num, iterable):\n return list(islice(iterable, num))", "async def _aiter_sync(iterable: Iterable[T]) -> AsyncIterator[T]:\n for item in 
iterable:\n yield item", "def _to_list(series: Union[TimeSeries, Sequence[TimeSeries]]) -> Sequence[TimeSeries]:\n\n return [series] if not isinstance(series, Sequence) else series", "def iter_cast(inputs, dst_type, return_type=None):\n if not isinstance(inputs, abc.Iterable):\n raise TypeError(\"inputs must be an iterable object\")\n if not isinstance(dst_type, type):\n raise TypeError('\"dst_type\" must be a valid type')\n\n out_iterable = map(dst_type, inputs)\n\n if return_type is None:\n return out_iterable\n else:\n return return_type(out_iterable)", "def __iter__(self):\n # type: () -> Iterator[Any]\n return iter(self[index] for index in range(len(self)))", "def get(s: Iterable[T]) -> T:\n return next(iter(s))", "def intercept(iterable, function):\n\n def intercepting(iterable_):\n for item in iterable_:\n function(item)\n yield item\n\n return intercepting(iterable)", "def _iterator_codegen(resty):\n\n def codegen(context, builder, sig, args):\n [d] = args\n [td] = sig.args\n iterhelper = context.make_helper(builder, resty)\n iterhelper.parent = d\n iterhelper.state = iterhelper.state.type(None)\n return impl_ret_borrowed(\n context,\n builder,\n resty,\n iterhelper._getvalue(),\n )\n\n return codegen", "def flatten(*seqs):\n for item in itertools.chain.from_iterable(seqs):\n if iscollection(item):\n yield from item\n else:\n yield item", "def item_iter(self, a):\r\n for i in a[0]:\r\n yield i", "def toSequence(self):\n return list(self.iteritems())", "def first(iterable: t.Iterable[T]) -> T:\n return next(iter(iterable))", "def with_iter(contextmanager):\n with contextmanager as iterable:\n for item in iterable:\n yield item", "def _items(mappingorseq):\n if hasattr(mappingorseq, 'items'):\n return iteritems(mappingorseq)\n return mappingorseq", "def maybe_generator(obj):\n if isinstance(obj, types.GeneratorType):\n for elt in obj:\n yield elt\n else:\n yield obj", "def getIter(object):\n iterator = None\n try:\n iterator = iter(object)\n except TypeError:\n pass\n return iterator", "def map(iterable, function):\n for x in iterable:\n yield function(x)", "def to_list():\n\n @sinks\n def _dagpype_internal_fn_act(target):\n l = []\n try:\n while True:\n l.append((yield))\n except GeneratorExit:\n target.send(l) \n target.close()\n\n return _dagpype_internal_fn_act", "def is_iterable(obj):\n return isinstance(obj, (list, tuple, types.GeneratorType)) or \\\n (not isinstance(obj, (int, str, dict)) and\n bool(getattr(obj, \"next\", False)))", "async def coerce_agen(gen):\n results = []\n async with multio.finalize_agen(gen) as agen:\n async for i in agen:\n results.append(i)\n\n return results", "def get_only(seq: Iterable[T]) -> T:\n it = iter(seq)\n try:\n first_element = it.__next__()\n # we use the sentinel approach rather than the usual (evil) Python \"attempt can catch the\n # exception\" approach to avoid raising zillions of spurious exceptions on the expected\n # code path, which makes debugging a pain\n sentinel = object()\n second_element = next(it, sentinel)\n if second_element is sentinel:\n return first_element\n else:\n got_msg: str\n if isinstance(seq, Sized):\n got_msg = str_list_limited(seq, limit=10)\n else:\n got_msg = f\"{first_element!r}, {second_element!r}, and possibly more.\"\n raise ValueError(f\"Expected one item in sequence but got {got_msg}\")\n except StopIteration:\n raise ValueError(\"Expected one item in sequence but got none\")", "def item_iter(self, a):\n for i in a[0]:\n yield i", "def flatten(list_):\n for elem in list_:\n if type(elem) != list:\n 
yield elem\n else:\n yield from flatten(elem)", "def _NextItem(self):\n if self._injected:\n self._injected = False\n return self._injected_value\n try:\n # Object is a generator or iterator.\n return self._iterable.next()\n except AttributeError:\n pass\n except StopIteration:\n self._tap.Done()\n raise\n try:\n # Object is a list.\n return self._iterable.pop(0)\n except (AttributeError, KeyError, TypeError):\n pass\n except IndexError:\n self._tap.Done()\n raise StopIteration\n # Object is not iterable -- treat it as the only item.\n if self._iterable is None or self._stop:\n self._tap.Done()\n raise StopIteration\n self._stop = True\n return self._iterable", "def is_iterable_object(maybe_iterable: Any) -> TypeGuard[Iterable[Any]]:\n\n return isinstance(maybe_iterable, Iterable)", "def chunk_seq(iseq: ISeq, maxlen: int) -> Iterable[ISeq]:\n return (iseq[i : i + maxlen] for i in range(0, len(iseq), maxlen))", "def convert_yielded(yielded):\n # Lists and dicts containing YieldPoints were handled earlier.\n if isinstance(yielded, (list, dict)):\n return multi(yielded)\n elif is_future(yielded):\n return yielded\n else:\n print(\"yielded unknown object %r\" % (yielded, ))\n raise BadYieldError(\"yielded unknown object %r\" % (yielded,))", "def to_list(x, repeat=1):\n if isinstance(x, (Generator, tuple, set)):\n return list(x)\n elif isinstance(x, list):\n return x\n elif isinstance(x, dict):\n return list(x.values())\n elif x is not None:\n return [x] * repeat\n else:\n return []", "def flatten_as_tuple(iterable):\n return tuple(chain(*iterable))", "def flatten(nested_iterable):\n\n for item in nested_iterable:\n if hasattr(item, '__iter__') and not isinstance(item, str):\n for i in flatten(item):\n yield i\n else:\n yield item", "def prepend(value, iterable):\n yield value\n yield from iterable", "def iter_sequence_infinite(seq):\n while True:\n for item in seq:\n yield item", "def get_iterable_itemtype(obj):\n # support further specific iterables on demand\n if isinstance(obj, _typechecked_Iterable):\n return obj.itemtype\n try:\n if isinstance(obj, range):\n tpl = tuple(deep_type(obj.start), deep_type(obj.stop), deep_type(obj.step))\n return Union[tpl]\n except TypeError:\n # We're running Python 2\n pass\n if type(obj) is tuple:\n tpl = tuple(deep_type(t) for t in obj)\n return Union[tpl]\n elif type(obj) is types.GeneratorType:\n return get_generator_yield_type(obj)\n else:\n tp = deep_type(obj)\n if is_Generic(tp):\n if issubclass(tp.__origin__, Iterable):\n if len(tp.__args__) == 1:\n return tp.__args__[0]\n return _select_Generic_superclass_parameters(tp, Iterable)[0]\n if is_iterable(obj):\n if type(obj) is str:\n return str\n if hasattr(obj, '__iter__'):\n if has_type_hints(obj.__iter__):\n itrator = _funcsigtypes(obj.__iter__, True, obj.__class__)[1]\n if is_Generic(itrator) and itrator.__origin__ is _orig_Iterator:\n return itrator.__args__[0]\n if hasattr(obj, '__getitem__'):\n if has_type_hints(obj.__getitem__):\n itrator = _funcsigtypes(obj.__getitem__, True, obj.__class__)[1]\n if is_Generic(itrator) and itrator.__origin__ is _orig_Iterator:\n return itrator.__args__[0]\n return None # means that type is unknown\n else:\n raise TypeError('Not an iterable: '+str(type(obj)))", "def is_iterable(var):\n return any(isinstance(var, cls) for cls in [list, tuple, types.GeneratorType])" ]
[ "0.71035177", "0.6873974", "0.66026473", "0.64714134", "0.61446095", "0.61344165", "0.61129576", "0.60355216", "0.60355216", "0.5988719", "0.5987896", "0.5984973", "0.5884642", "0.58802277", "0.5814334", "0.5812307", "0.57950664", "0.57258046", "0.56900185", "0.56784594", "0.5639", "0.5626846", "0.5611053", "0.5574864", "0.5568074", "0.5566418", "0.5561001", "0.55512595", "0.5517324", "0.5509419", "0.5501493", "0.549056", "0.54854304", "0.5479162", "0.5474574", "0.5461402", "0.5442181", "0.5440461", "0.5435595", "0.54230446", "0.5403423", "0.5398481", "0.53897643", "0.5373396", "0.5361281", "0.5360799", "0.5352045", "0.5344992", "0.53362256", "0.53194636", "0.5311346", "0.5305125", "0.5283666", "0.52834433", "0.5278143", "0.5271262", "0.5268693", "0.5258693", "0.5234684", "0.5225347", "0.5210818", "0.52019167", "0.51979107", "0.5195756", "0.51836026", "0.5178891", "0.5174219", "0.5171762", "0.5166138", "0.5162456", "0.5150793", "0.5143052", "0.51411843", "0.5136272", "0.5130336", "0.51229787", "0.5119312", "0.51157075", "0.5100772", "0.50999933", "0.5079796", "0.5077937", "0.50743085", "0.5072005", "0.5069082", "0.50664586", "0.5066113", "0.5064227", "0.50532985", "0.50455385", "0.504309", "0.50424254", "0.50375074", "0.50373966", "0.5036848", "0.50317633", "0.50315344", "0.50306875", "0.50291103", "0.5023213" ]
0.5409288
40
Similar to visit_iterable above, but capable of discarding empty SimpleStatementLine nodes in order to preserve correct pass insertion behavior.
def visit_body_iterable(
    parent: "CSTNode",
    fieldname: str,
    children: Sequence[CSTNodeT],
    visitor: "CSTVisitorT",
) -> Iterable[CSTNodeT]:

    visitor.on_visit_attribute(parent, fieldname)
    for child in children:
        new_child = child.visit(visitor)

        # Don't yield a child if we removed it.
        if isinstance(new_child, RemovalSentinel):
            continue

        # Don't yield a child if the old child wasn't empty
        # and the new child is. This means a RemovalSentinel
        # caused a child of this node to be dropped, and it
        # is now useless.
        if isinstance(new_child, FlattenSentinel):
            for child_ in new_child:
                if (not child._is_removable()) and child_._is_removable():
                    continue
                yield child_
        else:
            if (not child._is_removable()) and new_child._is_removable():
                continue
            # Safe to yield child in this case.
            yield new_child
    visitor.on_leave_attribute(parent, fieldname)
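A minimal runnable sketch of the behavior this helper implements, assuming the public LibCST API (import libcst as cst); the DropPrints transformer and the sample module are illustrative inventions, not part of LibCST. Returning RemovalSentinel.REMOVE from a leave_ method is what triggers the filtering that visit_body_iterable performs on the parent's body:

import libcst as cst

class DropPrints(cst.CSTTransformer):
    def leave_SimpleStatementLine(self, original_node, updated_node):
        # Remove any statement line that is a bare print(...) call; the body
        # helper above then drops the removed child from the enclosing block.
        for stmt in updated_node.body:
            if (
                isinstance(stmt, cst.Expr)
                and isinstance(stmt.value, cst.Call)
                and isinstance(stmt.value.func, cst.Name)
                and stmt.value.func.value == "print"
            ):
                return cst.RemovalSentinel.REMOVE
        return updated_node

module = cst.parse_module("def f():\n    print('debug')\n    return 1\n")
print(module.visit(DropPrints()).code)
# def f():
#     return 1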
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def visit_simple_stmt(self, node: Node) -> Iterator[Line]:\n prev_type: Optional[int] = None\n for child in node.children:\n if (prev_type is None or prev_type == token.SEMI) and is_arith_like(child):\n wrap_in_parentheses(node, child, visible=False)\n prev_type = child.type\n\n is_suite_like = node.parent and node.parent.type in STATEMENT\n if is_suite_like:\n if (\n self.mode.is_pyi or Preview.dummy_implementations in self.mode\n ) and is_stub_body(node):\n yield from self.visit_default(node)\n else:\n yield from self.line(+1)\n yield from self.visit_default(node)\n yield from self.line(-1)\n\n else:\n if (\n not (self.mode.is_pyi or Preview.dummy_implementations in self.mode)\n or not node.parent\n or not is_stub_suite(node.parent)\n ):\n yield from self.line()\n yield from self.visit_default(node)", "def gen_empty_statement(self, statement) -> None:\n pass", "def collect_inside_statements(self, node):\n new_body = [[], []]\n is_outside = False\n starting_col = self.get_column(node)\n for child in node.body:\n if not isinstance(child, EmptyLine) and self.get_column(child) <= starting_col:\n is_outside = True\n new_body[is_outside].append(child)\n while new_body[0] and isinstance(new_body[0][-1], EmptyLine):\n new_body[1].insert(0, new_body[0].pop())\n return new_body", "def emptyline(self):", "def visit_default(self, node: LN) -> Iterator[Line]:\n if isinstance(node, Leaf):\n any_open_brackets = self.current_line.bracket_tracker.any_open_brackets()\n for comment in generate_comments(node):\n if any_open_brackets:\n # any comment within brackets is subject to splitting\n self.current_line.append(comment)\n elif comment.type == token.COMMENT:\n # regular trailing comment\n self.current_line.append(comment)\n yield from self.line()\n\n else:\n # regular standalone comment\n yield from self.line()\n\n self.current_line.append(comment)\n yield from self.line()\n\n normalize_prefix(node, inside_brackets=any_open_brackets)\n if self.mode.string_normalization and node.type == token.STRING:\n node.value = normalize_string_prefix(node.value)\n node.value = normalize_string_quotes(node.value)\n if node.type == token.NUMBER:\n normalize_numeric_literal(node)\n if node.type not in WHITESPACE:\n self.current_line.append(node)\n yield from super().visit_default(node)", "def emptyline(self):\n pass", "def emptyline(self):\n pass", "def emptyline(self):\n pass", "def emptyline(self):\n pass", "def emptyline(self):\n pass", "def emptyline(self):\n pass", "def emptyline(self):\n pass", "def emptyline(self):\n pass", "def emptyline(self):\n pass", "def emptyline(self):\n pass", "def emptyline(self):\n pass", "def emptyline(self):\n pass", "def emptyline(self):\n pass", "def emptyline(self):\n pass", "def emptyline(self):\n pass", "def emptyline(self):\n pass", "def emptyline(self):\n pass", "def line_generator(self):\n for V in self.Vrepresentation():\n if V.is_line():\n yield V", "def _analyse_stmt_Pass(self, statement: ast.Pass, *, next: CFNode) -> CFNode:\n return self._ast_node(statement, next=next)", "def body(self, statements):\n for stmt in statements:\n self.current_context = None\n self.visit(stmt)\n if self.current_label is not None:\n # Create a noop statement to hold the last label:\n self.create_stmt(dast.NoopStmt, statements[-1], nopush=True)", "def emptyline(self):\n self.do_ls(\"\")", "def line(self, indent: int = 0) -> Iterator[Line]:\n if not self.current_line:\n self.current_line.depth += indent\n return # Line is empty, don't emit. 
Creating a new one unnecessary.\n\n if (\n Preview.improved_async_statements_handling in self.mode\n and len(self.current_line.leaves) == 1\n and is_async_stmt_or_funcdef(self.current_line.leaves[0])\n ):\n # Special case for async def/for/with statements. `visit_async_stmt`\n # adds an `ASYNC` leaf then visits the child def/for/with statement\n # nodes. Line yields from those nodes shouldn't treat the former\n # `ASYNC` leaf as a complete line.\n return\n\n complete_line = self.current_line\n self.current_line = Line(mode=self.mode, depth=complete_line.depth + indent)\n yield complete_line", "def _strip_lines(lines):\n for line in lines:\n stripped = line.strip()\n if stripped:\n yield stripped", "def parse_block_scalar_empty_line(indent_token_class, content_token_class):\n def callback(lexer, match, context):\n text = match.group()\n if (context.block_scalar_indent is None or\n len(text) <= context.block_scalar_indent):\n if text:\n yield match.start(), indent_token_class, text\n else:\n indentation = text[:context.block_scalar_indent]\n content = text[context.block_scalar_indent:]\n yield match.start(), indent_token_class, indentation\n yield (match.start()+context.block_scalar_indent,\n content_token_class, content)\n context.pos = match.end()\n return callback", "def visit_SEMI(self, leaf: Leaf) -> Iterator[Line]:\n yield from self.line()", "def emptyline(self):\n return", "def emptyline(self):\n return", "def visit_nullary(spec):", "def Statements(self):\n states = list()\n while self.currtok[1].name in {\"SEMI\", \"LCURLY\", \"IDENT\", \"if\", \"print\", \"while\", \"return\"}:\n state = self.Statement()\n states.append(state)\n return StatementsStmt(states)", "def _analyse_statements(\n self, statements: List[ast.stmt], *, next: CFNode\n ) -> CFNode:\n for statement in reversed(statements):\n analyse = getattr(self, \"_analyse_stmt_\" + type(statement).__name__)\n next = analyse(statement, next=next)\n return next", "def noop(self, stmt, p_elem, pset=''):\n pass", "def statements(self):\n node = self.annotated_ast_node\n nodes_subtexts = list(_split_code_lines(node.body, self.text))\n if nodes_subtexts == [(self.ast_node.body, self.text)]:\n # This block is either all comments/blanks or a single statement\n # with no surrounding whitespace/comment lines. Return self.\n return (PythonStatement._construct_from_block(self),)\n cls = type(self)\n statement_blocks = [\n cls.__construct_from_annotated_ast(subnodes, subtext, self.flags)\n for subnodes, subtext in nodes_subtexts]\n # Convert to statements.\n statements = []\n for b in statement_blocks:\n statement = PythonStatement._construct_from_block(b)\n statements.append(statement)\n # Optimization: set the new sub-block's ``statements`` attribute\n # since we already know it contains exactly one statement, itself.\n assert 'statements' not in b.__dict__\n b.statements = (statement,)\n return tuple(statements)", "def __iter__(self):\n return iter(self._statements)", "def _rhs(\n self: object, line: Line, features: Collection[Feature], mode: Mode\n ) -> Iterator[Line]:\n for omit in generate_trailers_to_omit(line, mode.line_length):\n lines = list(right_hand_split(line, mode, features, omit=omit))\n # Note: this check is only able to figure out if the first line of the\n # *current* transformation fits in the line length. This is true only\n # for simple cases. All others require running more transforms via\n # `transform_line()`. 
This check doesn't know if those would succeed.\n if is_line_short_enough(lines[0], mode=mode):\n yield from lines\n return\n\n # All splits failed, best effort split with no omits.\n # This mostly happens to multiline strings that are by definition\n # reported as not fitting a single line, as well as lines that contain\n # trailing commas (those have to be exploded).\n yield from right_hand_split(line, mode, features=features)", "def nullcontext() -> Iterator[None]:\n yield", "def filter_lines(view_lines):\n\n # this still doesn't work because we need to filter xml above the line level\n # do newlines from the server ever contain meaningful data or are they pointless?\n # is all the newline data given by a terminating xml-type tag?\n\n # filter lines that start with an exclude string - non-regex\n excludes = ['<prompt time=\"']\n for exclude in excludes:\n view_lines = [line for line in view_lines if line[0 : len(exclude)] != exclude]\n\n # first lets just rip out the xml... later we will want to process it back into the stream\n # mostly we can use the xml just to update the state, if that's the case then if we miss\n # one then it's no proble, we just catch the next one... provided they are regular enough.\n # if they are not, or set state once, then we definitely want to catch every one\n xml_free_lines = list()\n for line in view_lines:\n\n # assuming lines only have xml if they start with xml? interesting idea, not sure if real\n i = 0\n xml_free_line_segments = list()\n xml_line_segments = list()\n xml_free_line_part = b\"\"\n xml_line_part = b\"\"\n ordered_parsed_line = list() # give a tuple of string, type\n\n # ISSUE: i'm pretty sure this is dropping a letter off the first non-xml line segment (or more)\n # make a bunch of line segments\n # note that line is a bytes() type, indexing line[i] returns int\n # if we slice into it line[i:i+1] we get a bytes() type of length 1\n while i < len(line):\n\n if line[i : i + 1] != b\"<\":\n xml_free_line_part += line[i : i + 1]\n\n else:\n\n # increment until you get out of the xml tag or out of the line\n while i < len(line) and line[i : i + 1] != b\">\":\n xml_line_part += line[i : i + 1]\n i += 1\n\n # toss the last b'>' on the end!\n xml_line_part += line[i : i + 1]\n\n # store the xml part off\n xml_line_segments.append(xml_line_part)\n ordered_parsed_line.append((\"xml\", xml_line_part))\n xml_line_part = b\"\" # reset the xml part\n\n # store xml free part off\n if len(xml_free_line_part) > 1:\n xml_free_line_segments.append(xml_free_line_part)\n ordered_parsed_line.append((\"text\", xml_free_line_part))\n xml_free_line_part = b\"\" # reset the xml_free_line_part\n\n i += 1 # covers incrementing past the '>' and incrementing if not yet in a '<'\n\n \"\"\"\n # https://lxml.de/tutorial.html\n # if the xml cannot be parsed, we just want to catch it and decide what to do\n try:\n xml = [lxml.etree.XML(xml_line) for xml_line in xml_line_segments]\n xml_tags = [x.tag for x in xml]\n # just testing lxml tag parsing\n if b'streamWindow' in xml_tags:\n xml_free_lines.append(b'streamWindow skipped...')\n\n except lxml.etree.XMLSyntaxError:\n xml = list() # no tags\n # toss any failing XML onto the text stream for manual parsing?\n # we can follow this approach even if we replace or wrap lxml with a manual parser\n xml_free_lines.extend(xml_line_segments)\n \"\"\"\n # do stuff with the xml components of the line\n op_line = ordered_parsed_line\n\n # strip the line back down to text\n clean_line = [x[1].replace(b\"&gt;\", b\">\") for x in 
op_line if x[0] == \"text\"]\n xml_free_lines.append(b\"\".join(clean_line))\n\n # send a hunk of xml so we can see what happened\n xml_line = [x[1].replace(b\"&gt;\", b\">\") for x in op_line if x[0] == \"xml\"]\n xml_free_lines.append(b\"\".join(xml_line))\n\n # just point it here for now so we don't have to change the return\n view_lines = xml_free_lines\n\n \"\"\"\n EXCLUDES = [\n r'<prompt.*>',\n r'</prompt.*>',\n ]\n\n SUBS = [\n (r'<.*>', ''),\n ]\n\n # drop empty lines before the regex to save processing\n # what about lines with whitespace only...\n view_lines = [line for line in view_lines if line != b'' or line != b'&gt']\n\n for exclude in EXCLUDES:\n view_lines = [str(line) for line in view_lines if not re.search(exclude, str(line))]\n\n for expr, sub in SUBS:\n view_lines = [re.sub(expr, sub, str(line)) for line in view_lines]\n\n # drop empty lines after the regex so they aren't shown\n view_lines = [line for line in view_lines if line != b'' or line != b'&gt']\n \"\"\"\n\n return view_lines", "def visit_test(self, node: Node) -> Iterator[Line]:\n\n if Preview.parenthesize_conditional_expressions in self.mode:\n already_parenthesized = (\n node.prev_sibling and node.prev_sibling.type == token.LPAR\n )\n\n if not already_parenthesized:\n lpar = Leaf(token.LPAR, \"\")\n rpar = Leaf(token.RPAR, \"\")\n node.insert_child(0, lpar)\n node.append_child(rpar)\n\n yield from self.visit_default(node)", "def test_null(self):\n nl = NodeList([])\n r = str(nl)\n assert r == '', r\n for node in nl:\n raise Exception(\"should not enter this loop\")", "def visit_DEDENT(self, node: Leaf) -> Iterator[Line]:\n # The current line might still wait for trailing comments. At DEDENT time\n # there won't be any (they would be prefixes on the preceding NEWLINE).\n # Emit the line then.\n yield from self.line()\n\n # While DEDENT has no value, its prefix may contain standalone comments\n # that belong to the current indentation level. 
Get 'em.\n yield from self.visit_default(node)\n\n # Finally, emit the dedent.\n yield from self.line(-1)", "def logicalLines(iterable, **kwargs):\n # kwargs\n kwargs = lowerKeys(kwargs)\n continueChar = kwargs.get('continuechar', '-')\n commentChar = kwargs.get('commentchar', '!')\n #\n iterable = ( line.strip() for line in iterable )\n tmp = []\n for line in iterable:\n if line.split(commentChar)[0].endswith(continueChar):\n tmp.append(line[:-1])\n else:\n if tmp:\n tmp.append(line)\n yield ' '.join(tmp)\n tmp = []\n else:\n yield line\n # flush\n if tmp:\n yield ' '.join(tmp)", "def visit_stmt(\n self, node: Node, keywords: Set[str], parens: Set[str]\n ) -> Iterator[Line]:\n normalize_invisible_parens(\n node, parens_after=parens, mode=self.mode, features=self.features\n )\n for child in node.children:\n if is_name_token(child) and child.value in keywords:\n yield from self.line()\n\n yield from self.visit(child)", "def visit_INDENT(self, node: Leaf) -> Iterator[Line]:\n # In blib2to3 INDENT never holds comments.\n yield from self.line(+1)\n yield from self.visit_default(node)", "def test_empty_greplist_returns_line(self):\n # as it might mean we are not actually searching\n eq_(self.line,line_matches_greps(self.line,[]))", "def _ProcessLine(\n self,\n first_line,\n input_line,\n line,\n stripped_line,\n output_stream):\n # Check for the start of a code block.\n if constants.START_CODEBLOCK_RE.match(stripped_line):\n if self._code_block_depth == 0:\n # Start a new collection of lines.\n self._code_block_lines = []\n else:\n # Just an embedded code block.\n self._code_block_lines.append(line)\n self._code_block_depth += 1\n return\n\n # Check for the end of a code block.\n if constants.END_CODEBLOCK_RE.match(stripped_line):\n self._code_block_depth -= 1\n if self._code_block_depth == 0:\n # Closed the highest-level code block, handle it.\n self._formatting_handler.HandleEscapedText(\n input_line,\n output_stream,\n \"\\n\")\n self._formatting_handler.HandleCodeBlockOpen(\n input_line,\n output_stream,\n None)\n code = \"\".join(self._code_block_lines)\n self._formatting_handler.HandleText(input_line, output_stream, code)\n self._formatting_handler.HandleCodeBlockClose(input_line, output_stream)\n else:\n # Just closed an embedded clode block.\n self._code_block_lines.append(line)\n return\n\n # Check if we're in a code block.\n # If we are, just put the raw text into code_block_lines.\n if self._code_block_depth != 0:\n self._code_block_lines.append(line)\n return\n\n # For empty lines, close all formatting.\n if not stripped_line:\n if not self._ConsumeTextForPlugin():\n self._SetCurrentList(input_line, 0, \" \", output_stream)\n self._CloseTags(input_line, output_stream)\n\n if self._table_columns:\n self._formatting_handler.HandleTableClose(input_line, output_stream)\n self._table_columns = []\n self._table_column = 0\n\n self._formatting_handler.HandleParagraphBreak(input_line, output_stream)\n return\n\n # Non-empty line, finish the previous line's newline.\n if not first_line:\n self._formatting_handler.HandleEscapedText(\n input_line,\n output_stream,\n \"\\n\")\n\n # Now check if we're processing within a list.\n indent_pos = constants.INDENT_RE.match(line).end()\n if (indent_pos and indent_pos < len(line) and\n not self._ConsumeTextForPlugin()):\n list_type = constants.LIST_TYPES.get(line[indent_pos], \"blockquote\")\n\n if self._SetCurrentList(input_line, indent_pos, list_type, output_stream):\n # Blockquotes take the entire remainder of the line,\n # but everything else skips 
the list symbol plus the space after.\n # (In case there is no space after, the first character is skipped;\n # we will warn if this is detected, as it was probably unintended.)\n if list_type == \"blockquote\":\n line = line[indent_pos:]\n else:\n if line[indent_pos + 1] != \" \":\n self._warning_method(\n input_line,\n u\"Missing space after list symbol: {0}, \"\n \"'{1}' was removed instead.\"\n .format(line[indent_pos], line[indent_pos + 1]))\n line = line[indent_pos + 2:]\n\n stripped_line = line.strip()\n else:\n # Reset to no indent.\n self._SetCurrentList(input_line, 0, \" \", output_stream)\n\n # Finally, split the line into formatting primitives.\n # We do so without whitespace so we can catch line breaks across tags.\n if constants.LINE_FORMAT_RE.match(stripped_line):\n self._ProcessMatch(\n input_line,\n constants.LINE_FORMAT_RE,\n stripped_line,\n output_stream)\n else:\n self._ProcessMatch(\n input_line,\n constants.TEXT_FORMAT_RE,\n stripped_line,\n output_stream)\n\n self._CloseTableRow(input_line, output_stream)", "def complete_statement(self, line):\n if not line or (not pyparsing.Or(self.commentGrammars).setParseAction(lambda x: '').transformString(line)):\n raise EmptyStatement()\n statement = self.parsed(line)\n while statement.parsed.multilineCommand and (statement.parsed.terminator == ''):\n statement = '%s\\n%s' % (statement.parsed.raw,\n self.pseudo_raw_input(self.continuation_prompt))\n statement = self.parsed(statement)\n if not statement.parsed.command:\n raise EmptyStatement()\n return statement", "def visit_async_stmt(self, node: Node) -> Iterator[Line]:\n yield from self.line()\n\n children = iter(node.children)\n for child in children:\n yield from self.visit(child)\n\n if child.type == token.ASYNC or child.type == STANDALONE_COMMENT:\n # STANDALONE_COMMENT happens when `# fmt: skip` is applied on the async\n # line.\n break\n\n internal_stmt = next(children)\n if Preview.improved_async_statements_handling in self.mode:\n yield from self.visit(internal_stmt)\n else:\n for child in internal_stmt.children:\n yield from self.visit(child)", "def verbatim(self, stmt, suppress=False):\n if not suppress:\n self.statements.append(stmt)\n\n return stmt", "def test_iter_empty_sll(self):\n sll = SinglyLinkedList()\n a = Node('a')\n sll.insert_beg(a)\n sll.delete(a,a)\n print [i for i in sll]", "def _complete_statement(self, line):\n if not line or (not pyparsing.Or(self.commentGrammars).setParseAction(lambda x: '').transformString(line)):\n raise EmptyStatement()\n statement = self.parser_manager.parsed(line)\n while statement.parsed.multilineCommand and (statement.parsed.terminator == ''):\n statement = '%s\\n%s' % (statement.parsed.raw,\n self.pseudo_raw_input(self.continuation_prompt))\n statement = self.parser_manager.parsed(statement)\n if not statement.parsed.command:\n raise EmptyStatement()\n return statement", "def test_remove_blank_lines(self):\n before_b = \"\"\"\\\n first line\n\n line 1\n line a\n line b\n\n line c\n last line\n \"\"\"\n after_b = \"\"\"\\\n first line\n line 1\n line a\n line b\n line c\n last line\n \"\"\"\n self.run_test(\n before_b=before_b,\n after_b=after_b,\n before_sel=(\"1.0\", \"9.0\"),\n after_sel=(\"1.0\", \"6.9\"),\n command_name=\"remove-blank-lines\",\n )", "def assert_equal_none(logical_line):\n res = (asse_equal_start_with_none_re.match(logical_line) or\n asse_equal_end_with_none_re.match(logical_line))\n if res:\n yield (0, \"G318: assertEqual(A, None) or assertEqual(None, A) \"\n \"sentences not allowed\")", "def 
run(self, lines):\n self.lines = [line.strip() for line in lines]\n for index, line in enumerate(lines, start=1):\n self.truncate_line(line)\n line = self.remove_links(line)\n if line.startswith(CODE_BLOCK_DELIMITER):\n self.in_code_block = not self.in_code_block\n self.check_line(index, line)", "def _split_code_lines(ast_nodes, text):\n if not ast_nodes:\n yield ([], text)\n return\n assert text.startpos <= ast_nodes[0].startpos\n assert ast_nodes[-1].startpos < text.endpos\n if text.startpos != ast_nodes[0].startpos:\n # Starting noncode lines.\n yield ([], text[text.startpos:ast_nodes[0].startpos])\n end_sentinel = _DummyAst_Node()\n end_sentinel.startpos = text.endpos\n for node, next_node in zip(ast_nodes, ast_nodes[1:] + [end_sentinel]):\n startpos = node.startpos\n next_startpos = next_node.startpos\n assert startpos < next_startpos\n # We have the start position of this node. Figure out the end\n # position, excluding noncode lines (standalone comments and blank\n # lines).\n if hasattr(node, 'endpos'):\n # We have an endpos for the node because this was a multi-line\n # string. Start with the node endpos.\n endpos = node.endpos\n assert startpos < endpos <= next_startpos\n # enpos points to the character *after* the ending quote, so we\n # know that this is never at the beginning of the line.\n assert endpos.colno != 1\n # Advance past whitespace an inline comment, if any. Do NOT\n # advance past other code that could be on the same line, nor past\n # blank lines and comments on subsequent lines.\n line = text[endpos : min(text.endpos, FilePos(endpos.lineno+1,1))]\n if _is_comment_or_blank(line):\n endpos = FilePos(endpos.lineno+1, 1)\n else:\n endpos = next_startpos\n assert endpos <= text.endpos\n # We don't have an endpos yet; what we do have is the next node's\n # startpos (or the position at the end of the text). Start there\n # and work backward.\n if endpos.colno != 1:\n if endpos == text.endpos:\n # There could be a comment on the last line and no\n # trailing newline.\n # TODO: do this in a more principled way.\n if _is_comment_or_blank(text[endpos.lineno]):\n assert startpos.lineno < endpos.lineno\n if not text[endpos.lineno-1].endswith(\"\\\\\"):\n endpos = FilePos(endpos.lineno,1)\n else:\n # We're not at end of file, yet the next node starts in\n # the middle of the line. This should only happen with if\n # we're not looking at a comment. 
[The first character in\n # the line could still be \"#\" if we're inside a multiline\n # string that's the last child of the parent node.\n # Therefore we don't assert 'not\n # _is_comment_or_blank(...)'.]\n pass\n if endpos.colno == 1:\n while (endpos.lineno-1 > startpos.lineno and\n _is_comment_or_blank(text[endpos.lineno-1]) and\n (not text[endpos.lineno-2].endswith(\"\\\\\") or\n _is_comment_or_blank(text[endpos.lineno-2]))):\n endpos = FilePos(endpos.lineno-1, 1)\n assert startpos < endpos <= next_startpos\n yield ([node], text[startpos:endpos])\n if endpos != next_startpos:\n yield ([], text[endpos:next_startpos])", "def test_noop_function_call(self) -> None:\n before = after = \"\"\"\n for line in xreadlines(r):\n print(line)\n \"\"\"\n self.assertCodemod(before, after)", "def standalone_comment_split(\n line: Line, features: Collection[Feature], mode: Mode\n) -> Iterator[Line]:\n if not line.contains_standalone_comments(0):\n raise CannotSplit(\"Line does not have any standalone comments\")\n\n current_line = Line(\n mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets\n )\n\n def append_to_line(leaf: Leaf) -> Iterator[Line]:\n \"\"\"Append `leaf` to current line or to new line if appending impossible.\"\"\"\n nonlocal current_line\n try:\n current_line.append_safe(leaf, preformatted=True)\n except ValueError:\n yield current_line\n\n current_line = Line(\n line.mode, depth=line.depth, inside_brackets=line.inside_brackets\n )\n current_line.append(leaf)\n\n for leaf in line.leaves:\n yield from append_to_line(leaf)\n\n for comment_after in line.comments_after(leaf):\n yield from append_to_line(comment_after)\n\n if current_line:\n yield current_line", "def __handle_blank_line(\n parser_state,\n input_line,\n from_main_transform,\n position_marker=None,\n ):\n\n if not from_main_transform:\n close_only_these_blocks = [ParagraphStackToken]\n do_include_block_quotes = False\n else:\n close_only_these_blocks = None\n do_include_block_quotes = True\n POGGER.debug(\"hbl>>from_main_transform>>$\", from_main_transform)\n POGGER.debug(\"hbl>>close_only_these_blocks>>$\", close_only_these_blocks)\n POGGER.debug(\"hbl>>do_include_block_quotes>>$\", do_include_block_quotes)\n\n non_whitespace_index, extracted_whitespace = ParserHelper.extract_whitespace(\n input_line, 0\n )\n\n is_processing_list, in_index = LeafBlockProcessor.check_for_list_in_process(\n parser_state\n )\n POGGER.debug(\n \"hbl>>is_processing_list>>$>>in_index>>$>>last_stack>>$\",\n is_processing_list,\n in_index,\n parser_state.token_stack[-1],\n )\n\n requeue_line_info = None\n new_tokens = None\n force_default_handling = False\n if parser_state.token_stack[-1].was_link_definition_started:\n POGGER.debug(\n \"hbl>>process_link_reference_definition>>stopping link definition\"\n )\n empty_position_marker = PositionMarker(-1, 0, \"\")\n (\n _,\n _,\n did_pause_lrd,\n requeue_line_info,\n new_tokens,\n ) = LinkReferenceDefinitionHelper.process_link_reference_definition(\n parser_state, empty_position_marker, \"\", \"\", \"\", 0, 0\n )\n assert not did_pause_lrd\n force_default_handling = True\n elif parser_state.token_stack[-1].is_code_block:\n stack_bq_count = parser_state.count_of_block_quotes_on_stack()\n if stack_bq_count:\n POGGER.debug(\"hbl>>code block within block quote\")\n else:\n POGGER.debug(\"hbl>>code block\")\n new_tokens = []\n elif parser_state.token_stack[-1].is_html_block:\n POGGER.debug(\"hbl>>check_blank_html_block_end\")\n new_tokens = 
HtmlHelper.check_blank_html_block_end(parser_state)\n elif (\n is_processing_list\n and parser_state.token_document[-1].is_blank_line\n and parser_state.token_document[-2].is_list_start\n ):\n POGGER.debug(\"hbl>>double blank in list\")\n new_tokens, _ = TokenizedMarkdown.__close_open_blocks(\n parser_state, until_this_index=in_index, include_lists=True\n )\n\n if from_main_transform:\n POGGER.debug(\"hbl>>__handle_blank_line_in_block_quote\")\n TokenizedMarkdown.__handle_blank_line_in_block_quote(parser_state)\n\n if force_default_handling or new_tokens is None:\n POGGER.debug(\"hbl>>default blank handling-->cob\")\n n_tokens, _ = TokenizedMarkdown.__close_open_blocks(\n parser_state,\n only_these_blocks=close_only_these_blocks,\n include_block_quotes=do_include_block_quotes,\n was_forced=True,\n )\n if new_tokens:\n new_tokens.extend(n_tokens)\n else:\n new_tokens = n_tokens\n\n POGGER.debug(\"hbl>>new_tokens>>$\", new_tokens)\n assert non_whitespace_index == len(input_line)\n if not (requeue_line_info and requeue_line_info.force_ignore_first_as_lrd):\n new_tokens.append(\n BlankLineMarkdownToken(extracted_whitespace, position_marker)\n )\n POGGER.debug(\"hbl>>new_tokens>>$\", new_tokens)\n\n return new_tokens, requeue_line_info", "def test_file_iterator_removes_leading_whitespace(self):\n for line in file_iterator('example_module.py'):\n self.assertFalse(line.startswith(' '))", "def visit_iterable(\n parent: \"CSTNode\",\n fieldname: str,\n children: Iterable[CSTNodeT],\n visitor: \"CSTVisitorT\",\n) -> Iterable[CSTNodeT]:\n visitor.on_visit_attribute(parent, fieldname)\n for child in children:\n new_child = child.visit(visitor)\n if isinstance(new_child, FlattenSentinel):\n yield from new_child\n elif not isinstance(new_child, RemovalSentinel):\n yield new_child\n visitor.on_leave_attribute(parent, fieldname)", "def removeLines(self) -> List['StateNode']:\n lines = self.state[0]\n states: List[StateNode] = []\n for i in range(len(lines)):\n for j in range(i + 1, len(lines) + 1):\n new_lines = lines[:i] + lines[j:]\n if len(new_lines) == 0:\n continue\n states.append(StateNode(self.table, \n (new_lines, self.state[1]),\n (lines[i:j], []),\n self.cost + len(self.state[1]),\n self))\n return states", "def invisible_visit(self, node: Node) -> None:\n pass", "def consolidate_empty_blocks(self):\n new_blocks = []\n for block in self.blocks:\n if isinstance(block, BasicBlock) and not block.statements:\n self.remove_block(block)\n else:\n new_blocks.append(block)\n self.blocks = new_blocks", "def assert_equal_not_none(logical_line):\n msg = \"M302: assertEqual(A is not None) sentences not allowed.\"\n res = assert_equal_with_is_not_none_re.search(logical_line)\n if res:\n yield (0, msg)", "def simplify_tokenized(statement: sqlparse.sql.TokenList):\n return simplify_tokens(statement.tokens)", "def instrs(self):\n x = self._firstInstr\n while x is not None:\n # now we can remove x and continue iterating :)\n x_next = x.next\n yield x\n x = x_next", "def FilterLine(self, a_line):\n return a_line", "def generate_trailers_to_omit(line: Line, line_length: int) -> Iterator[Set[LeafID]]:\n\n omit: Set[LeafID] = set()\n if not line.magic_trailing_comma:\n yield omit\n\n length = 4 * line.depth\n opening_bracket: Optional[Leaf] = None\n closing_bracket: Optional[Leaf] = None\n inner_brackets: Set[LeafID] = set()\n for index, leaf, leaf_length in line.enumerate_with_length(reversed=True):\n length += leaf_length\n if length > line_length:\n break\n\n has_inline_comment = leaf_length > len(leaf.value) + 
len(leaf.prefix)\n if leaf.type == STANDALONE_COMMENT or has_inline_comment:\n break\n\n if opening_bracket:\n if leaf is opening_bracket:\n opening_bracket = None\n elif leaf.type in CLOSING_BRACKETS:\n prev = line.leaves[index - 1] if index > 0 else None\n if (\n prev\n and prev.type == token.COMMA\n and leaf.opening_bracket is not None\n and not is_one_sequence_between(\n leaf.opening_bracket, leaf, line.leaves\n )\n ):\n # Never omit bracket pairs with trailing commas.\n # We need to explode on those.\n break\n\n inner_brackets.add(id(leaf))\n elif leaf.type in CLOSING_BRACKETS:\n prev = line.leaves[index - 1] if index > 0 else None\n if prev and prev.type in OPENING_BRACKETS:\n # Empty brackets would fail a split so treat them as \"inner\"\n # brackets (e.g. only add them to the `omit` set if another\n # pair of brackets was good enough.\n inner_brackets.add(id(leaf))\n continue\n\n if closing_bracket:\n omit.add(id(closing_bracket))\n omit.update(inner_brackets)\n inner_brackets.clear()\n yield omit\n\n if (\n prev\n and prev.type == token.COMMA\n and leaf.opening_bracket is not None\n and not is_one_sequence_between(leaf.opening_bracket, leaf, line.leaves)\n ):\n # Never omit bracket pairs with trailing commas.\n # We need to explode on those.\n break\n\n if leaf.value:\n opening_bracket = leaf.opening_bracket\n closing_bracket = leaf", "def minimalComrnaParser(lines):\n block = []\n first = True\n record = False\n for line in lines:\n if line.startswith('=========================== S ='):\n record = True\n if not first:\n yield block\n block = []\n first = False\n if record: \n block.append(line)\n yield block", "def rehydrate_blank_line(cls, next_token):\n return next_token.extracted_whitespace + \"\\n\"", "def visit_match_case(self, node: Node) -> Iterator[Line]:\n normalize_invisible_parens(\n node, parens_after=set(), mode=self.mode, features=self.features\n )\n\n yield from self.line()\n for child in node.children:\n yield from self.visit(child)", "def test_empty_list(self):\n self.assertLines({\n 'hosts': [],\n 'foo': 'something',\n 'another': [1,2],\n }, [\n 'foo: something hosts: []',\n 'foo: something hosts: [] another: 1',\n 'foo: something hosts: [] another: 2',\n ])", "def lineToList(self, line):\n raise NotImplementedError", "def test_file_iterator_removes_all_whitespace(self):\n for line in file_iterator('example_module.py'):\n self.assertEqual(line, line.strip())", "def test_empty_greplist_returns_line(self):\n # as it might mean we are not actually searching\n eq_(self.line,line_no_matches_ngreps(self.line,[]))", "def statements(self):\n\n while self.token.value not in ('EOF', 'else', 'end'):\n\n with self.resync('\\n', consume=True):\n self.statement()\n\n if not self.match(Tokens.SYMBOL, \";\"):\n self.error(\"expected ';' after statement \", token=self.prev_token, after_token=True)\n\n # consume the 'end' token if there is one\n self.match(Tokens.KEYWORD, 'end')", "def _process_stmt(self, node: ast.stmt) -> None:\n if isinstance(node, (ast.ClassDef, ast.FunctionDef)):\n self._process_def(node)\n elif isinstance(node, ast.Assign):\n self._process_assign(node)\n elif isinstance(node, ast.Expr):\n self._process_expr(node)\n else:\n self.visit(node)", "def __emptygen():\n if False:\n yield", "def __traverse_node(self, node):\n lines = []\n for path, node_elem in node.filter(javalang.tree.BinaryOperation):\n if node_elem.operator == 'instanceof' and node_elem.operandl.position is not None:\n code_line = node_elem.operandl.position.line or 
node_elem.operandr.position.line\n lines.append(code_line)\n for path, node_elem in node.filter(javalang.tree.MethodInvocation):\n if node_elem.member == 'isInstance':\n lines.append(node_elem.position.line)\n\n return lines", "def _analyse_stmt_Expr(self, statement: ast.Expr, *, next: CFNode) -> CFNode:\n return self._ast_node(statement, next=next, error=self._raise)", "def Parse(\r\n initial_statement_info: DynamicStatementInfo,\r\n normalized_iter: NormalizedIterator,\r\n observer: Observer,\r\n\r\n # True to execute all statements within a single thread\r\n single_threaded=False,\r\n) -> Optional[List[Statement.StatementParseResultItem]]:\r\n\r\n assert normalized_iter.Offset == 0, normalized_iter.Offset\r\n\r\n statement_observer = _StatementObserver(initial_statement_info, observer)\r\n results = []\r\n\r\n while not normalized_iter.AtEnd():\r\n result = Statement.ParseMultiple(\r\n statement_observer.GetDynamicStatements(DynamicStatements.Statements),\r\n normalized_iter,\r\n statement_observer,\r\n single_threaded=single_threaded,\r\n )\r\n\r\n if result is None:\r\n return None\r\n\r\n if not result.Success:\r\n raise SyntaxInvalidError(\r\n result.Iter.Line,\r\n result.Iter.Column,\r\n result.Results,\r\n )\r\n\r\n normalized_iter = result.Iter\r\n\r\n assert len(result.Results) == 1, result.Results\r\n result = result.Results[0]\r\n\r\n results.append(result)\r\n\r\n assert normalized_iter.AtEnd()\r\n\r\n return results", "def stmts_to_stmt(statements):\n if len(statements) == 1:\n return statements[0]\n array = FakeArray(statements, arr_type=pr.Array.NOARRAY)\n return FakeStatement([array])", "def _EatWhitespaceToken(\r\n cls,\r\n normalized_iter: NormalizedIterator,\r\n ) -> Optional[List[\"Statement.TokenParseResultItem\"]]:\r\n\r\n if normalized_iter.AtEnd():\r\n return None\r\n\r\n normalized_iter_begin = normalized_iter.Clone()\r\n normalized_iter = normalized_iter.Clone()\r\n\r\n result = cls._indent_token.Match(normalized_iter)\r\n if result is not None:\r\n assert not isinstance(result, list), result\r\n\r\n return [\r\n Statement.TokenParseResultItem(\r\n cls._indent_token,\r\n None,\r\n result,\r\n normalized_iter_begin,\r\n normalized_iter,\r\n IsIgnored=True,\r\n ),\r\n ]\r\n\r\n result = cls._dedent_token.Match(normalized_iter)\r\n if result is not None:\r\n assert isinstance(result, list), result\r\n\r\n return [\r\n Statement.TokenParseResultItem(\r\n cls._dedent_token,\r\n None,\r\n res,\r\n normalized_iter_begin,\r\n normalized_iter.Clone(),\r\n IsIgnored=True,\r\n )\r\n for res in result\r\n ]\r\n\r\n # A potential comment or newline may have potential whitespace\r\n potential_iter = normalized_iter.Clone()\r\n potential_whitespace = cls._ExtractWhitespace(potential_iter)\r\n potential_iter_begin = potential_iter.Clone()\r\n\r\n result = Statement.CommentToken.Match(potential_iter)\r\n if result is not None:\r\n assert not isinstance(result, list), result\r\n\r\n return [\r\n Statement.TokenParseResultItem(\r\n Statement.CommentToken,\r\n potential_whitespace,\r\n result,\r\n potential_iter_begin,\r\n potential_iter,\r\n IsIgnored=True,\r\n ),\r\n ]\r\n\r\n result = cls._newline_token.Match(potential_iter)\r\n if result is not None:\r\n assert not isinstance(result, list), result\r\n\r\n return [\r\n Statement.TokenParseResultItem(\r\n cls._newline_token,\r\n potential_whitespace,\r\n result,\r\n potential_iter_begin,\r\n potential_iter,\r\n IsIgnored=True,\r\n ),\r\n ]\r\n\r\n return None", "def visit_Compound(self, node):\n for statement in 
node.statements:\n self.visit(statement)", "def split_sub_statement(stream, node_types):\n \n if isinstance(stream, Node):\n stream = stream.get_inner_body()\n \n current_node = None\n \n try:\n while True:\n \n token = next(stream)\n #print('current token ', token)\n \n matched = False\n \n for node_type in node_types:\n match = Node.match_begin(node_type, token, stream)\n if match:\n \n matched = True\n previous_node = current_node\n \n # build current node\n current_node = node_type()\n current_node.children += match\n \n if previous_node:\n yield previous_node\n \n # stop looking for a match \n break\n \n # non matching token \n if not matched:\n \n if current_node:\n current_node.children.append(token)\n else:\n yield token\n except:\n pass\n\n if current_node: \n yield current_node", "def __iter__(self):\n for item in(self.data_):\n if(item!= None):#We dont want to yield spaces that have not been filled yet \n yield item", "def test_multi_no_match_return_expr(self):\n eq_(None,line_matches_greps(self.line,[\"foo\",\"idontmatch\"]))", "def unstructured(self):\n for leaf in self.sequences.values():\n if type(leaf) is list:\n for item in leaf:\n if item is None:\n continue\n yield item\n else:\n if leaf is None:\n continue\n yield leaf", "def emptyGenerator():\n return\n yield", "def test_kill_line_start_blank_line(self):\n before_b = \"\"\"\\\n line 1\n line 2\n\n line 4\n \"\"\"\n after_b = \"\"\"\\\n line 1\n line 2\n line 4\n \"\"\"\n self.run_test(\n before_b=before_b,\n after_b=after_b,\n before_sel=(\"3.0\", \"3.0\"),\n after_sel=(\"3.0\", \"3.0\"),\n command_name=\"kill-line\",\n )", "def nonempty_lines(text):\n return [line for line in text.split('\\n') if line]", "def iterparse(self, lines: Union[Iterable[str], str]) -> Iterator[Tree]:\n tokens = lex(lines, pattern=PENMAN_RE)\n while tokens and tokens.peek().type in ('COMMENT', 'LPAREN'):\n yield self._parse(tokens)", "def __post_init__(self) -> None:\n self.current_line = Line(mode=self.mode)\n\n v = self.visit_stmt\n Ø: Set[str] = set()\n self.visit_assert_stmt = partial(v, keywords={\"assert\"}, parens={\"assert\", \",\"})\n self.visit_if_stmt = partial(\n v, keywords={\"if\", \"else\", \"elif\"}, parens={\"if\", \"elif\"}\n )\n self.visit_while_stmt = partial(v, keywords={\"while\", \"else\"}, parens={\"while\"})\n self.visit_for_stmt = partial(v, keywords={\"for\", \"else\"}, parens={\"for\", \"in\"})\n self.visit_try_stmt = partial(\n v, keywords={\"try\", \"except\", \"else\", \"finally\"}, parens=Ø\n )\n self.visit_except_clause = partial(v, keywords={\"except\"}, parens={\"except\"})\n self.visit_with_stmt = partial(v, keywords={\"with\"}, parens={\"with\"})\n self.visit_classdef = partial(v, keywords={\"class\"}, parens=Ø)\n self.visit_expr_stmt = partial(v, keywords=Ø, parens=ASSIGNMENTS)\n self.visit_return_stmt = partial(v, keywords={\"return\"}, parens={\"return\"})\n self.visit_import_from = partial(v, keywords=Ø, parens={\"import\"})\n self.visit_del_stmt = partial(v, keywords=Ø, parens={\"del\"})\n self.visit_async_funcdef = self.visit_async_stmt\n self.visit_decorated = self.visit_decorators\n\n # PEP 634\n self.visit_match_stmt = self.visit_match_case\n self.visit_case_block = self.visit_match_case", "def preprocessNode(self):\n while self.node.firstChild():\n self.node.firstChild().doDelete(self.node)", "def ignore_visit_interslide(self, node):\n raise SkipNode", "def _analyse_stmt_Delete(self, statement: ast.Delete, *, next: CFNode) -> CFNode:\n return self._ast_node(statement, next=next, 
error=self._raise)", "def visit_Compound(self, n):\n self._create_new_node = True\n for stmt in n.block_items:\n self.visit(stmt)" ]
[ "0.6896916", "0.58363104", "0.5718831", "0.57141036", "0.5682867", "0.55000716", "0.55000716", "0.55000716", "0.55000716", "0.55000716", "0.55000716", "0.55000716", "0.55000716", "0.55000716", "0.55000716", "0.55000716", "0.55000716", "0.55000716", "0.55000716", "0.55000716", "0.55000716", "0.55000716", "0.5418537", "0.5368451", "0.5360683", "0.53356993", "0.5331573", "0.53024495", "0.5291138", "0.52574867", "0.5252656", "0.5252656", "0.51917833", "0.5141353", "0.5106053", "0.5094838", "0.50736254", "0.5069351", "0.50284207", "0.5024618", "0.5018256", "0.50133896", "0.49861595", "0.49786872", "0.49785146", "0.49772006", "0.49656162", "0.49589175", "0.49340573", "0.4932339", "0.49253556", "0.48897293", "0.48871228", "0.4886285", "0.48860124", "0.4885887", "0.48580074", "0.48437464", "0.48430967", "0.48341477", "0.4828084", "0.4816148", "0.48008484", "0.4797081", "0.47964618", "0.4790771", "0.47892305", "0.47819188", "0.47782058", "0.4774245", "0.4771874", "0.47677234", "0.47511873", "0.47495037", "0.47295797", "0.47191566", "0.47145912", "0.47109663", "0.47024783", "0.47002065", "0.46950087", "0.46848392", "0.46844122", "0.46794048", "0.46751037", "0.46604595", "0.46578556", "0.46499184", "0.46366552", "0.4635384", "0.46337542", "0.46337187", "0.46325308", "0.46295142", "0.4615964", "0.46108967", "0.46095413", "0.458863", "0.45859674", "0.45820338" ]
0.4621964
94
A convenience wrapper for `visit_body_iterable` that returns a sequence instead of an iterable.
def visit_body_sequence( parent: "CSTNode", fieldname: str, children: Sequence[CSTNodeT], visitor: "CSTVisitorT", ) -> Sequence[CSTNodeT]: return tuple(visit_body_iterable(parent, fieldname, children, visitor))
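A companion sketch for the splice case, again assuming the public LibCST API; DuplicateAssigns is illustrative. Returning a FlattenSentinel from a leave_ method yields several statements in place of one, and visit_body_sequence is the wrapper that materializes the resulting iterable into the concrete tuple a node constructor expects:

import libcst as cst

class DuplicateAssigns(cst.CSTTransformer):
    def leave_SimpleStatementLine(self, original_node, updated_node):
        # Splice two statements in place of each assignment; the flattened
        # children end up materialized into a tuple by visit_body_sequence.
        if any(isinstance(s, cst.Assign) for s in updated_node.body):
            return cst.FlattenSentinel([updated_node, updated_node.deep_clone()])
        return updated_node

module = cst.parse_module("x = 1\nprint(x)\n")
print(module.visit(DuplicateAssigns()).code)
# x = 1
# x = 1
# print(x)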
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def concrete(seq):\n if isinstance(seq, Iterator):\n seq = list(seq)\n if isinstance(seq, (tuple, list)):\n seq = list(map(concrete, seq))\n return seq", "def make_iterable(arg):\n return arg if is_iterable(arg) else (arg,)", "def toiter(x):\n if iterable(x):\n return x\n else:\n return [x]", "def visit_body_iterable(\n parent: \"CSTNode\",\n fieldname: str,\n children: Sequence[CSTNodeT],\n visitor: \"CSTVisitorT\",\n) -> Iterable[CSTNodeT]:\n\n visitor.on_visit_attribute(parent, fieldname)\n for child in children:\n new_child = child.visit(visitor)\n\n # Don't yield a child if we removed it.\n if isinstance(new_child, RemovalSentinel):\n continue\n\n # Don't yield a child if the old child wasn't empty\n # and the new child is. This means a RemovalSentinel\n # caused a child of this node to be dropped, and it\n # is now useless.\n\n if isinstance(new_child, FlattenSentinel):\n for child_ in new_child:\n if (not child._is_removable()) and child_._is_removable():\n continue\n yield child_\n else:\n if (not child._is_removable()) and new_child._is_removable():\n continue\n # Safe to yield child in this case.\n yield new_child\n visitor.on_leave_attribute(parent, fieldname)", "def _ensure_iterable(x):\n if isinstance(x[0], Iterable):\n if len(x) > 1:\n raise TypeError(\"Either Iterable or variable argument list expected\")\n return x[0]\n else:\n return x", "def generator_wrapper(iterable):\n\n num_items = len(iterable)\n for idx in range(num_items):\n yield iterable[idx]", "def _cast(self, out, peek=None):\n\n if isinstance(out, tulip.Future) or inspect.isgenerator(out):\n out = yield from out\n\n # Empty output is done here\n if not out:\n if 'Content-Length' not in response:\n response['Content-Length'] = 0\n return []\n # Join lists of byte or str strings. Mixed lists are NOT supported\n if isinstance(out, (tuple, list))\\\n and isinstance(out[0], (bytes, str)):\n out = out[0][0:0].join(out) # b'abc'[0:0] -> b''\n # Encode str strings\n if isinstance(out, str):\n out = out.encode(response.charset)\n # Byte Strings are just returned\n if isinstance(out, bytes):\n if 'Content-Length' not in response:\n response['Content-Length'] = len(out)\n return [out]\n # HTTPError or HTTPException (recursive, because they may wrap anything)\n # TODO: Handle these explicitly in handle() or make them iterable.\n if isinstance(out, HTTPError):\n out.apply(response)\n out = self.error_handler.get(out.status_code, self.default_error_handler)(out)\n return (yield from self._cast(out))\n if isinstance(out, HTTPResponse):\n out.apply(response)\n return (yield from self._cast(out.body))\n\n # File-like objects.\n if hasattr(out, 'read'):\n if 'wsgi.file_wrapper' in request.environ:\n return request.environ['wsgi.file_wrapper'](out)\n elif hasattr(out, 'close') or not hasattr(out, '__iter__'):\n return WSGIFileWrapper(out)\n\n # Handle Iterables. 
We peek into them to detect their inner type.\n try:\n iout = iter(out)\n first = next(iout)\n while not first:\n first = next(iout)\n except StopIteration:\n return (yield from self._cast(''))\n except HTTPResponse:\n first = _e()\n except (KeyboardInterrupt, SystemExit, MemoryError):\n raise\n except Exception:\n if not self.catchall: raise\n first = HTTPError(500, 'Unhandled exception', _e(), format_exc())\n\n # These are the inner types allowed in iterator or generator objects.\n if isinstance(first, HTTPResponse):\n return (yield from self._cast(first))\n elif isinstance(first, bytes):\n new_iter = itertools.chain([first], iout)\n elif isinstance(first, str):\n encoder = lambda x: x.encode(response.charset)\n new_iter = map(encoder, itertools.chain([first], iout))\n else:\n msg = 'Unsupported response type: %s' % type(first)\n return (yield from self._cast(HTTPError(500, msg)))\n if hasattr(out, 'close'):\n new_iter = _closeiter(new_iter, out.close)\n return new_iter", "def get_iterable(obj):\n if obj is None:\n return ()\n\n if (isinstance(obj, collections.Iterable) and\n not isinstance(obj, six.string_types)):\n return obj\n\n return (obj,)", "def flatten(l):\n for el in l:\n if isinstance(el, collections.Iterable) and not isinstance(el, (str, bytes)):\n yield from flatten(el)\n else:\n yield el", "def simple_seq(seq):\n for i in seq:\n yield i", "def actually_flatten(iterable):\n remainder = iter(iterable)\n while True:\n first = next(remainder) # pylint: disable=R1708\n # Python 2/3 compat\n is_iter = isinstance(first, collections.Iterable)\n try:\n basestring\n except NameError:\n basestring = str # pylint: disable=W0622\n\n if is_py3() and is_iter and not_a_string(first):\n remainder = IT.chain(first, remainder)\n elif (not is_py3()) and is_iter and not isinstance(first, basestring):\n remainder = IT.chain(first, remainder)\n else:\n yield polite_string(first)", "def caboose(seq: Iterable[TItem], el: TElement) -> Iterable[Union[TElement, TItem]]:\n yield from seq\n yield el", "def ensure_list(iterable: Iterable[A]) -> List[A]:\n if isinstance(iterable, list):\n return iterable\n else:\n return list(iterable)", "def ensure_list(iterable: Iterable[A]) -> List[A]:\n if isinstance(iterable, list):\n return iterable\n else:\n return list(iterable)", "def test_iterlist_op_1():\n\n @ops.iterlist_op\n def f(x):\n return [4, 5, 6]\n\n result = f(iter([1, 2, 3])) # Passing in an iterator, as expected\n\n assert(isinstance(result, list)), f\"{result}\"\n assert(result == [4, 5, 6])", "def render_iterable(content, **context):\n tail = iter(content)\n head = next(tail)\n\n # Render tag around the content\n if isinstance(head, basestring):\n for e in render_tag(head, tail, **context):\n yield e\n # Render nested lists\n elif isinstance(head, collections.Iterable):\n for e in render_iterable(head, **context):\n yield e\n for content in tail:\n for e in render_content(content, **context):\n yield e", "def _iterator_codegen(resty):\n\n def codegen(context, builder, sig, args):\n [d] = args\n [td] = sig.args\n iterhelper = context.make_helper(builder, resty)\n iterhelper.parent = d\n iterhelper.state = iterhelper.state.type(None)\n return impl_ret_borrowed(\n context,\n builder,\n resty,\n iterhelper._getvalue(),\n )\n\n return codegen", "def gather(iterable: Iterable[Maybe[A]]) -> Maybe[Iterable[A]]:\n return cast(Maybe[Iterable[A]], sequence_(Just, iterable))", "def test_iteriter_op_1():\n\n @ops.iteriter_op\n def f(x):\n return iter([4, 5, 6])\n\n result = f(iter([1, 2, 3])) # Passing in an 
iterator, as expected\n\n assert(isinstance(result, collections.abc.Iterator)), f\"{result}\"\n assert(list(result) == [4, 5, 6])", "def test_listiter_op_1():\n\n @ops.listiter_op\n def f(x):\n return iter([4, 5, 6])\n\n result = f([1, 2, 3]) # Passing in a list, as expected\n\n assert(isinstance(result, collections.abc.Iterator)), f\"{result}\"\n assert(list(result) == [4, 5, 6])", "def builtin_iterable(func):\n if sys.version_info[:1] < (3,):\n @wraps(func)\n def inner(*args, **kwargs):\n return list(func(*args, **kwargs))\n return inner\n return func", "def is_iterable_object(maybe_iterable: Any) -> TypeGuard[Iterable[Any]]:\n\n return isinstance(maybe_iterable, Iterable)", "def makeiter(obj):\n if not obj:\n return []\n if not isiterable(obj):\n return [obj]\n return obj", "def flatten_iterator(x: Iterable[_T]) -> Iterator[_T]:\n elem: _T\n for elem in x:\n if not isinstance(elem, str) and hasattr(elem, \"__iter__\"):\n yield from flatten_iterator(elem)\n else:\n yield elem", "def flatten(iterable):\n for element_iterable in iterable:\n for element in element_iterable:\n yield element", "def is_iterable(obj):\n return isinstance(obj, (list, tuple, types.GeneratorType)) or \\\n (not isinstance(obj, (int, str, dict)) and\n bool(getattr(obj, \"next\", False)))", "def safe_iterator(i):\n return i or []", "def iterable(arg):\n return isinstance(arg, collections.Iterable) and not isinstance(arg, six.string_types)", "def flatten(iterable):\n return [x for x in actually_flatten(iterable)]", "def __call__(self, iterable):\n if self._ordered:\n imap = self._distrubtor.imap\n else:\n imap = self._distrubtor.imap_unordered\n\n for result in imap(iterable):\n yield result", "def flatten(items):\n for x in items:\n if isinstance(x, Iterable) and not isinstance(x, (str, bytes)):\n for sub_x in flatten(x):\n yield sub_x\n else:\n yield x", "def flatten(l):\n for el in l:\n if isinstance(el, Iterable) and not isinstance(el, (str, bytes)):\n for sub in flatten(el):\n yield sub\n else:\n yield el", "def flatten(*seqs):\n for item in itertools.chain.from_iterable(seqs):\n if iscollection(item):\n yield from item\n else:\n yield item", "def _items(mappingorseq):\n if hasattr(mappingorseq, 'items'):\n return iteritems(mappingorseq)\n return mappingorseq", "def get_children(cls, node: ast.AST) -> Iterable[ast.AST]:\n body: Optional[Sequence[ast.AST]] = getattr(node, 'body', None)\n if body is not None:\n for child in body:\n yield child", "def to_iterable(stage=pypeln_utils.UNDEFINED, maxsize=0):\n\n if utils.is_undefined(stage):\n return utils.Partial(lambda stage: _to_iterable(stage, maxsize))\n else:\n return _to_iterable(stage, maxsize)", "def to_iterable(\n stage: Stage = pypeln_utils.UNDEFINED, maxsize: int = 0\n) -> typing.Iterable:\n\n if pypeln_utils.is_undefined(stage):\n return pypeln_utils.Partial(lambda stage: to_iterable(stage, maxsize=maxsize))\n\n if isinstance(stage, Stage):\n iterable = stage.to_iterable(maxsize=maxsize)\n else:\n iterable = stage\n\n return iterable", "def mkiter(item):\n # FIXME: don't really need to construct a list\n if item is None:\n return iter(())\n elif isIterable(item):\n return iter(item)\n else:\n return iter([item])", "def intercept(iterable, function):\n\n def intercepting(iterable_):\n for item in iterable_:\n function(item)\n yield item\n\n return intercepting(iterable)", "def __iter__(self):\n return iter(self.to_list())", "def flatten(iterable):\n return it.chain.from_iterable(iterable)", "def pipeline(func):\n @wraps(func)\n def process(img_or_iterable, 
*args, **kwargs):\n if isinstance(img_or_iterable, (SliceableIterable, FramesSequence)):\n _len = len(img_or_iterable)\n s = SliceableIterable(img_or_iterable, range(_len), _len)\n s._proc_func = lambda image: func(image, *args, **kwargs)\n return s\n else:\n # Fall back on normal behavior of func, interpreting input\n # as a single image.\n return func(img_or_iterable)\n\n if process.__doc__ is None:\n process.__doc__ = ''\n process.__doc__ = (\"This function has been made pims-aware. When passed\\n\"\n \"a pims reader or SliceableIterable, it will return a \\n\"\n \"new SliceableIterable of the results. When passed \\n\"\n \"other objects, its behavior is \"\n \"unchanged.\\n\\n\") + process.__doc__\n return process", "def _NextItem(self):\n if self._injected:\n self._injected = False\n return self._injected_value\n try:\n # Object is a generator or iterator.\n return self._iterable.next()\n except AttributeError:\n pass\n except StopIteration:\n self._tap.Done()\n raise\n try:\n # Object is a list.\n return self._iterable.pop(0)\n except (AttributeError, KeyError, TypeError):\n pass\n except IndexError:\n self._tap.Done()\n raise StopIteration\n # Object is not iterable -- treat it as the only item.\n if self._iterable is None or self._stop:\n self._tap.Done()\n raise StopIteration\n self._stop = True\n return self._iterable", "def flatten(iterable):\n return chain(*iterable)", "def maybe_generator(obj):\n if isinstance(obj, types.GeneratorType):\n for elt in obj:\n yield elt\n else:\n yield obj", "def flatten_as_list(iterable):\n return list(chain(*iterable))", "def flow_to_iter(flow):\n if ((sys.version_info.major == 3 and hasattr(flow, \"__next__\"))\n or (sys.version_info.major == 2 and hasattr(flow, \"next\"))):\n return flow\n else:\n return iter(flow)", "def accept(self, visitor: Any) -> Any:\n visitor.visit_entire_sequence(self)", "def PeekIterable(iterable):\n try:\n head_element = iterable.next()\n new_iterable = itertools.chain([head_element], iterable)\n return head_element, new_iterable\n except StopIteration:\n return None, iterable", "def anyObject(iterable):\n for obj in iterable:\n return obj", "def __iter__(self):\n return iter(())", "def flatmap(iterable, function_to_list):\n for element in iterable:\n list_block = function_to_list(element)\n for result_value in list_block:\n yield result_value", "def _assert_valid_value_and_cast(self, value):\n if not hasattr(value, '__iter__'):\n raise AssertionError(f\"{value} is not a valid iterable type\")\n return value", "def body(self):\n return list(\n itertools.chain.from_iterable(story.body for story in self.stories)\n )", "def is_sequence_of_iterable(items):\n return all(is_item_iterable(item) for item in items)", "def is_iterable(obj):\n if isinstance(obj, (str, bytes, bytearray)):\n return False\n return isinstance(obj, Iterable)", "def visit_iterable(\n parent: \"CSTNode\",\n fieldname: str,\n children: Iterable[CSTNodeT],\n visitor: \"CSTVisitorT\",\n) -> Iterable[CSTNodeT]:\n visitor.on_visit_attribute(parent, fieldname)\n for child in children:\n new_child = child.visit(visitor)\n if isinstance(new_child, FlattenSentinel):\n yield from new_child\n elif not isinstance(new_child, RemovalSentinel):\n yield new_child\n visitor.on_leave_attribute(parent, fieldname)", "def for_each(f: Callable[[A], Maybe[B]], iterable: Iterable[A]\n ) -> Maybe[Iterable[B]]:\n return cast(Maybe[Iterable[B]], map_m_(Just, f, iterable))", "def item_iter(self, a):\r\n for i in a[0]:\r\n yield i", "def isIterable(obj):\n return isinstance(obj, 
ListType)", "def convert_yielded(yielded):\n # Lists and dicts containing YieldPoints were handled earlier.\n if isinstance(yielded, (list, dict)):\n return multi(yielded)\n elif is_future(yielded):\n return yielded\n else:\n print(\"yielded unknown object %r\" % (yielded, ))\n raise BadYieldError(\"yielded unknown object %r\" % (yielded,))", "def map_transformer(item_transformer):\r\n\r\n def wrapper(items: collections.Iterable):\r\n return map(item_transformer, items)\r\n\r\n return wrapper", "def is_iterable(var):\n return any(isinstance(var, cls) for cls in [list, tuple, types.GeneratorType])", "def _request_generator(request, data_handler):\n # First, the request header.\n yield data_handler.request_to_bytes(request)\n\n # Then, for the body. The body can be bytes or an iterator, but that's it.\n # The iterator is the more general case, so let's transform the bytes into\n # an iterator via my friend the list.\n if isinstance(request.body, bytes):\n body = [request.body]\n else:\n body = request.body\n\n for data_chunk in body:\n yield data_handler.body_chunk_to_bytes(data_chunk)\n\n yield data_handler.end_of_body()", "def iter_cast(inputs, dst_type, return_type=None):\n if not isinstance(inputs, abc.Iterable):\n raise TypeError(\"inputs must be an iterable object\")\n if not isinstance(dst_type, type):\n raise TypeError('\"dst_type\" must be a valid type')\n\n out_iterable = map(dst_type, inputs)\n\n if return_type is None:\n return out_iterable\n else:\n return return_type(out_iterable)", "def _is_iterable(items):\n return isinstance(items, (list, tuple, set, np.ndarray))", "def with_iter(contextmanager):\n with contextmanager as iterable:\n for item in iterable:\n yield item", "def _build_iterable(self):", "def deep_flatten(iterable):\n pass", "def item_iter(self, a):\n for i in a[0]:\n yield i", "def map(iterable, function):\n for x in iterable:\n yield function(x)", "def __iter__(self):\n if self.is_payload(set, list):\n return iter(self._attr)\n\n if self.is_payload(dict):\n as_key_pairs = [(k, v) for k, v in self._attr.items()]\n return iter(as_key_pairs)", "def _is_iterable(value):\n return isinstance(value, list) or isinstance(value, tuple)", "def flatten_stream(chunk_stream: collections.Iterable) -> collections.Iterable:\r\n return chain.from_iterable(chunk_stream)", "def isIterable(value):\n try:\n iter(value)\n return True\n except:\n return False", "async def _aiter_sync(iterable: Iterable[T]) -> AsyncIterator[T]:\n for item in iterable:\n yield item", "def flatmap(func, *iterable) -> Iterator:\n return map(func, chain(*chain(*iterable)))", "def flatten(*args):\n for arg in args:\n for item in arg:\n yield item", "def __iter__(self):\n return iter(self._items)\n # to use a generator, it would look like this...\n # for item in self._items: yield item", "def map(function, iterable):\n\n return [function(x) for x in iterable]", "def flatten(list_):\n for elem in list_:\n if type(elem) != list:\n yield elem\n else:\n yield from flatten(elem)", "def flatten(sequence):\n\n if not isinstance(sequence, (list, tuple)):\n raise TypeError('Support only an instance of list or tuple')\n\n for i in sequence:\n if isinstance(i, (list, tuple)):\n yield from flatten(i)\n else:\n yield i", "def body(self):\n return ''.join(list(self.iter))", "def flatten(c):\n for x in c:\n if isinstance(x, str) or not isinstance(x, Iterable):\n yield x\n else:\n yield from flatten(x)", "def chunk(iterable, size=AWS_PAGE_SIZE):\n return (iterable[pos:pos + size] for pos in range(0, len(iterable), size))", 
"def __iter__(self):\n # type: () -> Iterator[Any]\n return iter(self[index] for index in range(len(self)))", "def __iter__(self):\n return iter(self._items)", "def __iter__(self):\n return iter(self._items)", "def sequence_side_effect(*args):\n seq = list(args)\n\n def rv_fun(*args, **kw):\n return seq.pop(0)\n return rv_fun", "def __iter__(self):\r\n\r\n return iter(self._contents)", "def chek_iter_obj(func):\n\n def wrapper(self, lst):\n try:\n iter(lst)\n return func(self, lst)\n except TypeError:\n raise TypeError(f\"{lst} isn't iterable object\")\n\n return wrapper", "def __iter__(self):\n return iter(self.items)", "def __iter__(self):\n return iter(self.items)", "def __iter__(self):\n for x in self.seq: yield x", "def __iter__(self) -> Iterator[Any]:\n return iter(self.contents)", "def __iter__(self):\r\n return iter(self._items)", "def __iter__(self):\r\n return iter(self._items)", "def get_iterable_itemtype(obj):\n # support further specific iterables on demand\n if isinstance(obj, _typechecked_Iterable):\n return obj.itemtype\n try:\n if isinstance(obj, range):\n tpl = tuple(deep_type(obj.start), deep_type(obj.stop), deep_type(obj.step))\n return Union[tpl]\n except TypeError:\n # We're running Python 2\n pass\n if type(obj) is tuple:\n tpl = tuple(deep_type(t) for t in obj)\n return Union[tpl]\n elif type(obj) is types.GeneratorType:\n return get_generator_yield_type(obj)\n else:\n tp = deep_type(obj)\n if is_Generic(tp):\n if issubclass(tp.__origin__, Iterable):\n if len(tp.__args__) == 1:\n return tp.__args__[0]\n return _select_Generic_superclass_parameters(tp, Iterable)[0]\n if is_iterable(obj):\n if type(obj) is str:\n return str\n if hasattr(obj, '__iter__'):\n if has_type_hints(obj.__iter__):\n itrator = _funcsigtypes(obj.__iter__, True, obj.__class__)[1]\n if is_Generic(itrator) and itrator.__origin__ is _orig_Iterator:\n return itrator.__args__[0]\n if hasattr(obj, '__getitem__'):\n if has_type_hints(obj.__getitem__):\n itrator = _funcsigtypes(obj.__getitem__, True, obj.__class__)[1]\n if is_Generic(itrator) and itrator.__origin__ is _orig_Iterator:\n return itrator.__args__[0]\n return None # means that type is unknown\n else:\n raise TypeError('Not an iterable: '+str(type(obj)))", "def unchunk():\n\n @filters\n def _dagpype_internal_fn_act(target):\n try:\n while True:\n a = (yield)\n if len(a) == 0:\n continue\n if a.ndim == 1:\n for i in range(a.shape[0]):\n target.send(a[i]) \n else:\n for i in range(a.shape[0]):\n target.send(tuple(a[i]))\n except GeneratorExit:\n if len(l) > 0:\n target.send(numpy.array(l, dtype = dtype_)) \n \n return _dagpype_internal_fn_act", "def flatten(nested_iterable):\n\n for item in nested_iterable:\n if hasattr(item, '__iter__') and not isinstance(item, str):\n for i in flatten(item):\n yield i\n else:\n yield item" ]
[ "0.628293", "0.5938098", "0.59329444", "0.5872919", "0.5857194", "0.5846575", "0.5786622", "0.578137", "0.5776865", "0.5740671", "0.5727584", "0.57273525", "0.5643586", "0.5643586", "0.56330574", "0.55489177", "0.55487835", "0.5492281", "0.5490387", "0.5448653", "0.5433615", "0.5416934", "0.5412295", "0.54024124", "0.53946483", "0.5388618", "0.5369647", "0.5358001", "0.53463924", "0.5344537", "0.532611", "0.5268573", "0.5258", "0.52557445", "0.5254348", "0.5243862", "0.5226014", "0.5208671", "0.52080387", "0.5199002", "0.51947945", "0.51771265", "0.5148487", "0.5142143", "0.51407427", "0.5137309", "0.51322436", "0.5122473", "0.51054895", "0.50953776", "0.50884", "0.5087657", "0.50778204", "0.5067992", "0.50675654", "0.5065182", "0.5063304", "0.5054283", "0.50526166", "0.5044745", "0.5042159", "0.5040541", "0.5035655", "0.5021558", "0.5020222", "0.501971", "0.50162596", "0.5013341", "0.50129837", "0.49962544", "0.49835047", "0.4973593", "0.49689618", "0.49662495", "0.49576557", "0.49517894", "0.49481523", "0.49418584", "0.49365056", "0.4933662", "0.4928616", "0.49217933", "0.49064922", "0.49010703", "0.49007252", "0.48986393", "0.4897704", "0.4897704", "0.48972183", "0.48946366", "0.48941603", "0.48880816", "0.48880816", "0.48876527", "0.487333", "0.4864763", "0.4864763", "0.48623234", "0.48576182", "0.48575145" ]
0.60773885
1
make generic 2d drawer
def makeDrawer(self,node):
    drawer = MeshDrawer2D()
    drawer.setBudget(3000)
    drawerNode = drawer.getRoot()
    drawerNode.reparentTo(node)
    drawerNode.setDepthWrite(False)
    drawerNode.setTransparency(True)
    drawerNode.setTwoSided(True)
    drawerNode.setBin("fixed",0)
    drawerNode.setLightOff(True)
    drawerNode.node().setBounds(OmniBoundingVolume())
    drawerNode.node().setFinal(True)
    # debug wire frame
    #cc = drawerNode.copyTo(node)
    #cc.setRenderModeWireframe()
    return drawer
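A minimal usage sketch for the makeDrawer document above, assuming Panda3D, where MeshDrawer2D and OmniBoundingVolume live in panda3d.core. The Demo class name is hypothetical, and the rectangle() call with argument order (x, y, w, h, u, v, us, vs, color) is my reading of the MeshDrawer2D API rather than anything stated in the row; the no-argument begin()/end() pattern matches the drawer usage seen in this dataset's own negative samples.

from direct.showbase.ShowBase import ShowBase
from panda3d.core import MeshDrawer2D, OmniBoundingVolume, Vec4

class Demo(ShowBase):  # hypothetical host class for the sketch
    def __init__(self):
        ShowBase.__init__(self)
        # parent the drawer under aspect2d so coordinates are 2D screen units
        self.drawer = self.makeDrawer(self.aspect2d)
        self.taskMgr.add(self.drawTask, "draw2d")

    # the document's method, reproduced so the sketch is self-contained
    def makeDrawer(self, node):
        drawer = MeshDrawer2D()
        drawer.setBudget(3000)
        drawerNode = drawer.getRoot()
        drawerNode.reparentTo(node)
        drawerNode.setDepthWrite(False)
        drawerNode.setTransparency(True)
        drawerNode.setTwoSided(True)
        drawerNode.setBin("fixed", 0)
        drawerNode.setLightOff(True)
        drawerNode.node().setBounds(OmniBoundingVolume())
        drawerNode.node().setFinal(True)
        return drawer

    def drawTask(self, task):
        d = self.drawer
        d.begin()                      # open a batch of 2D geometry
        d.rectangle(-0.5, -0.5, 1, 1,  # x, y, width, height in aspect2d units
                    0, 0, 1, 1,        # UV origin and span (assumed order)
                    Vec4(1, 1, 1, 1))  # RGBA color; draws a white quad
        d.end()                        # flush the batch to the geometry node
        return task.cont

Demo().run()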
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def draw():", "def draw_walker(indx):\n chart_1.create_oval(hips[indx]-6, hips[indx+1]-6,hips[indx]+6, hips[indx+1]+6, fill= \"magenta\", width = 1, tag = 'line_1') \n chart_1.create_line(hips[indx], hips[indx+1], knee_a[indx], knee_a[indx+1], fill= \"blue\", width = 2, tag = 'line_1') \n chart_1.create_line(hips[indx], hips[indx+1], knee_b[indx], knee_b[indx+1], fill= \"green\", width = 2, tag = 'line_1') \n chart_1.create_line(knee_a[indx], knee_a[indx+1], heel_a[indx], heel_a[indx+1], fill= \"blue\", width = 2, tag = 'line_1') \n chart_1.create_line(knee_b[indx], knee_b[indx+1], heel_b[indx], heel_b[indx+1], fill= \"green\", width = 2, tag = 'line_1')", "def __init__(self, controller):\r\n self.controller = controller\r\n \r\n pygame.init() \r\n pygame.display.set_caption(\"Desktop CNC Miller\")\r\n #create the screen\r\n self.max_x = 1200\r\n self.max_y = 600\r\n self.window = pygame.display.set_mode( (self.max_x, self.max_y) )\r\n #set background\r\n #self.window.fill( (30, 30, 255) )\r\n self.window.fill( (0,0,0) )\r\n \r\n midpnt = int(self.max_x*0.6)\r\n self.drawer_bounds = pygame.Rect(0, 0, midpnt, self.max_y)\r\n self.control_panel_bounds = pygame.Rect(midpnt, 0, self.max_x-midpnt, self.max_y)\r\n \r\n self.control_panel = ControlPanel(self.window, self.control_panel_bounds, self.controller)\r\n self.drawer = Drawer(self.window)\r\n \r\n self.control_panel.draw()", "def draw(self, context):\n layout = self.layout\n\n pie = layout.menu_pie()\n pie.operator(\"object.view_menu\", text=\"Node Editor\", icon='NODETREE').vp = \"NODE_EDITOR\"\n pie.operator(\"object.view_menu\", text=\"UV Image Editor\", icon='IMAGE_COL').vp = \"IMAGE_EDITOR\"\n pie.operator(\"object.view_menu\", text=\"Video Sequece Editor\", icon='SEQUENCE').vp = \"SEQUENCE_EDITOR\"\n pie.operator(\"object.view_menu\", text=\"Movie Clip Editor\", icon='CLIP').vp = \"CLIP_EDITOR\"", "def draw_components(drawer, center, scale, tile):\n # Draw any animal territories on the tile\n if tile.animal is not None:\n animal_color = {\"bear\": \"black\", \"cougar\": \"red\"}[tile.animal]\n draw_poly(\n drawer,\n center=center,\n sides=6,\n scale=int(0.8 * scale),\n border=COLORS[animal_color],\n )\n\n # Draw any structure on the tile\n if tile.structure is not None:\n struct, color = tile.structure\n border = \"white\" if color == \"black\" else \"black\"\n\n if struct == \"shack\":\n struct_center = [center[0], center[1] + 0.45 * scale]\n draw_poly(\n drawer,\n center=struct_center,\n sides=3,\n scale=int(0.5 * scale),\n color=COLORS[color],\n border=COLORS[border],\n rotation=cmath.pi / 6,\n )\n else:\n struct_center = [center[0], center[1] + 0.35 * scale]\n draw_poly(\n drawer,\n center=struct_center,\n sides=8,\n scale=int(0.4 * scale),\n color=COLORS[color],\n border=COLORS[border],\n rotation=cmath.pi / 8,\n )\n\n player_count = len(tile.players) - tile.players.count(None)\n if player_count:\n draw_width = 0.4 * scale * (player_count - 1)\n used_count = 0\n for player, truth in enumerate(tile.players):\n if truth is not None:\n x, y = center\n x += used_count * 0.4 * scale - draw_width / 2\n y -= 0.15 * scale\n\n used_count += 1\n\n if truth:\n draw_poly(\n drawer,\n center=(x, y),\n sides=20,\n scale=int(0.3 * scale),\n color=COLORS[f\"p{player + 1}\"],\n # border=COLORS['black'],\n )\n else:\n draw_poly(\n drawer,\n center=(x, y),\n sides=4,\n scale=int(0.3 * scale),\n color=COLORS[f\"p{player + 1}\"],\n # border=COLORS['black'],\n rotation=cmath.pi / 4,\n )", "def draw(self):", "def __init__(self, id, lvl):\n\n 
self.id = id \n self.name = lvl[512:512+32].tostring().strip()\n a2c = {} # attribute to c mapping\n self.bgchars = [None] * 8\n self.bgattr = [None] * 8\n self.bgpal = [None] * 8\n for c in range(8):\n o = 544 + 9 * c\n attr = lvl[o]\n data = lvl[o+1:o+9]\n if not attr in a2c:\n a2c[attr] = c\n self.bgchars[c] = data\n self.bgattr[c] = attr\n self.background = array.array('B', [a2c[a] for a in lvl[:512]])\n self.border = gd2.RGB(*color(lvl[627], 0))\n self.item = lvl[692:700]\n self.items = []\n def getxy(dd):\n \"\"\" decode a packed screen coordinate \"\"\"\n x = dd[0] & 31\n y = (dd[0] >> 5) + ((dd[1] & 1) << 3)\n return (8 * x, 8 * y)\n\n for i in range(5):\n idata = lvl[629 + 5*i:629 + 5 * (i+1)]\n if idata[0] == 255:\n break\n if idata[0]:\n (x, y) = getxy(idata[1:3])\n self.items.append((x, y))\n\n self.portalattr = lvl[655]\n self.portal = lvl[656:688]\n self.portalxy = getxy(lvl[688:690])\n self.willyxy = getxy(lvl[620:622])\n self.willyxy = (self.willyxy[0], self.willyxy[1] + ((lvl[616] >> 1) & 7))\n self.willyd = lvl[618];\n self.willyf = lvl[617];\n\n self.guardian = [lvl[768+32*i:768+32*(i+1)] for i in range(8)]\n\n self.hguardians = [(0,0,0,0,0,0)] * 8\n for i in range(4):\n gdata = lvl[702 + 7*i: 702 + 7*(i+1)]\n if gdata[0] == 255:\n break\n if gdata[0]:\n a = gdata[0]\n x,y = getxy(gdata[1:3])\n d = gdata[4]\n x0 = 8 * (gdata[5] & 0x1f)\n x1 = 8 * (gdata[6] & 0x1f)\n assert x0 < x1\n self.hguardians[i] = (a, x, y, d, x0, x1)\n if self.id != 4: # special for Eugene's lair\n for i in range(4):\n gdata = lvl[733 + 7*i: 733 + 7*(i+1)]\n if gdata[0] == 255:\n break\n if gdata[0]:\n a = gdata[0]\n y = gdata[2]\n x = gdata[3] * 8\n d = gdata[4]\n y0 = gdata[5]\n y1 = gdata[6]\n self.hguardians[4+i] = (a, x, y, d, y0, y1)\n self.conveyordir = 1 - lvl[623] # Game uses 1=right here, sigh\n self.air = 8 * ((lvl[700] - 32) - 4) + len([c for c in bin(lvl[701]) if (c == '1')])\n self.special = lvl[736:768]\n assert (0 < self.air <= 224)\n print self.name", "def draw(self,children):\n self.clip = [(0,0,gui._width+100, gui._height+100)]\n\n self.drawer.setClip(0,0,gui._width+100, gui._height+100)\n \n self.drawer.begin()\n z = 0\n for child in reversed(children):\n z += 1\n self.drawChild(0,0,z,child)\n \n self.drawer.end()", "def update_drawer_img(self):\n self.drawer = aggdraw.Draw(self.img)\n self.drawer.settransform(self.coordspace_transform)", "def draw(self, screen):", "def makeThemeDrawer(self,node):\n themeDrawer = self.makeDrawer(node)\n themeDrawer.getRoot().setTexture(self.image)\n return themeDrawer", "def drawType(self, gtype):\n if gtype == \"mirror\":\n mirror = self.mirror()\n for x in range(self.numRows):\n print mirror[x]\n elif gtype == \"upsidedown\":\n usd = self.upsidedown()\n for x in range(self.numRows):\n print usd[x]\n elif gtype == \"upsidedownmirror\":\n usd = self.upsidedownmirror()\n for x in range(self.numRows):\n print usd[x]", "def draw(self): \n pygame.event.clear()\n self.window = ocempgui.widgets.Box(GG.utils.SCREEN_SZ[0], GG.utils.SCREEN_SZ[1])\n self.paintScreen()\n self.paintAvatar()\n self.paintTags()\n self.paintCustomizeZone()\n self.paintButtons()\n self.window.zOrder = 90000\n self.window.depth = 2\n return self.window", "def init_right_zone(self):\n self.right_zone_rect = pg.rect.Rect(500, 0, 300, 80)\n self.right_zone_image = pg.Surface(self.right_zone_rect.size).convert()\n self.right_zone_image.fill(pg.Color('#82A6CB'))\n self.right_zone_bottom_rect = pg.rect.Rect(500, 70, 800, 10)\n self.right_zone_bottom_image = pg.Surface(\n 
self.right_zone_bottom_rect.size).convert()\n self.right_zone_bottom_image.fill(pg.Color('#3667A6'))\n self.right_zone_side_rect = pg.rect.Rect(500, 50, 10, 30)\n self.right_zone_side_image = pg.Surface(\n self.right_zone_side_rect.size).convert()\n self.right_zone_side_image.fill(pg.Color('#3667A6'))\n\n self.selected_tower = None\n self.selected_monster = None\n self.tower_attack_image = prepare.GFX['icons']['tower_damage']\n self.tower_cooldown_image = prepare.GFX['icons']['tower_cooldown']\n self.monster_health_image = prepare.GFX['icons']['monster_health']\n self.monster_speed_image = prepare.GFX['icons']['monster_speed']\n self.selected_image_pos = (510, 10)\n self.selected_name_pos = (570, 7)\n self.selected_info_pos = (570, 30)\n self.selected_description_pos = (570, 50)", "def _create(self):\n if self.h >= 2:\n # Draw standard shape\n for i in range(1, self.h - 1):\n self.window.addch(i, 0, curses.ACS_VLINE | self.colour) # '|'\n\n # Draw scrolling bar if necessary\n if self.size > 0:\n end = min(self.pos + self.size, self.h)\n for i in range(self.pos, end):\n self.window.addch(i, 0, chr(0x2588), self.colour) # '█'\n\n # Draw arrows if necessary\n if self.counter > 0:\n self.window.addch(0, 0, chr(0x25B2), self.colour) # '▲'\n if self.counter < self.content_size - self.h:\n self.window.addch(self.h - 1, 0, chr(0x25BC), self.colour) # '▼'\n\n # Finally refresh window\n self.window.refresh()", "def draw(self, context):\n layout = self.layout\n pie = layout.menu_pie()\n\n pie.operator(\"wm.call_menu_pie\", text=\"Images\", icon='IMAGE_COL').name = \"PieAreaViewsImage\"\n pie.operator(\"wm.call_menu_pie\", text=\"Anim\", icon='IPO').name = \"PieAreaViewsAnim\"\n pie.operator(\"screen.screen_full_area\", text=\"Full Screen\", icon='FULLSCREEN_ENTER')\n pie.operator(\"object.view_menu\", text=\"3D View\", icon='VIEW3D').vp = \"VIEW_3D\"\n\n # DIAGONALS\n pie.operator(\"wm.call_menu_pie\", text=\"Utils\", icon='BUTS').name = \"PieAreaViewsUtils\"\n pie.separator()\n pie.operator(\"area.joinarea\", text=\"Join\", icon='X')\n pie.operator(\"wm.call_menu_pie\", text=\"Split\", icon='SPLITSCREEN').name = \"pie.split_viewport\"", "def draw(self,renderer,dx,dy):\n for i in self.itemType.find('display'):\n if i.tag == 'rect':\n colors = i.find('color').text[1:-1].split(',')\n SDL_SetRenderDrawColor(renderer,int(colors[0]),int(colors[1]),int(colors[2]),int(colors[3]) if len(colors) > 3 else 255)\n rect = SDL_Rect()\n rect.x, rect.y = self.getPos()\n rect.x, rect.y = rect.x+dx,rect.y+dy\n rect.w, rect.h = self.getSize()\n SDL_RenderFillRect(renderer,rect)", "def draw_door():\n\n jose.penup()\n jose.goto(38, -137)\n jose.pendown()\n jose.begin_fill()\n for i in range(2): # this loop draws a rectangle for the door of the building.\n jose.forward(40)\n jose.right(90)\n jose.forward(20)\n jose.right(90)\n jose.end_fill()\n jose.hideturtle()", "def make_visual(self,\r\n ruler = None,\r\n options: list = ['rows',\r\n 'centers',\r\n 'distances']\r\n ) -> 'Image':\r\n \r\n original = self.get_picture().get_photo()\r\n\r\n # Copy the original image for drawing\r\n img = original.copy()\r\n draw = ImageDraw.Draw(img)\r\n\r\n # check all the choices provided by the user\r\n for i in options:\r\n \r\n if i == 'clusters':\r\n # Color all cluster pixels red\r\n \r\n for j in self.get_clusters():\r\n for k in j:\r\n img.putpixel(k, (25,275,25))\r\n\r\n elif i == 'row_ids':\r\n # Make row id numbers\r\n\r\n # Font specifications\r\n size = 75\r\n font = ImageFont.truetype('ariblk.ttf', size)\r\n color = (88, 
214, 216)\r\n num = 1\r\n\r\n # Draw the ids\r\n for j in self.rows:\r\n draw.text((j[0].get_center()[0],\r\n j[0].get_center()[1] - 0.25 * size),\r\n str(num),\r\n fill = color,\r\n font = font)\r\n num += 1\r\n\r\n elif i == 'boxes':\r\n # Show all bounding boxes\r\n \r\n for i in self.get_boxes():\r\n draw.rectangle(i, outline=(255, 0, 255))\r\n\r\n elif i == 'dirt':\r\n # Remove Background\r\n \r\n img = Image.new('RGB', img.size, (130, 90, 50))\r\n draw = ImageDraw.Draw(img)\r\n\r\n elif i == 'centers':\r\n # Show all centers\r\n \r\n rad = 9\r\n for i in self.get_centers():\r\n draw.arc([(i[0] - rad, i[1] - rad),\r\n (i[0] + rad, i[1] + rad)],\r\n 0, 360, (0, 0, 255))\r\n\r\n elif i == 'ditches':\r\n # Show ditches between plants\r\n\r\n # Line attribute settings\r\n width = 10\r\n color = (55,65,65)\r\n\r\n # Iterate over all ditches\r\n for line in self.ditches:\r\n line = [line[0], line[1]]\r\n\r\n # Point in ditch on left border of picture\r\n start_point = (0, line[1])\r\n\r\n # Point in ditch on right border of picture\r\n end_point = (self.picture.get_size()[0] - 1,\r\n line[0]\r\n * (self.picture.get_size()[0] - 1)\r\n + line[1])\r\n\r\n ## Check if the end point is within the picture\r\n if end_point[1] < 0:\r\n if start_point[1] < 0:\r\n continue\r\n \r\n # Point in ditch on top border of picture\r\n end_point = (-1 * line[1] / line[0], 0)\r\n\r\n elif end_point[1] > self.picture.get_size()[1] - 1:\r\n if start_point[1] > self.picture.get_size()[1] - 1:\r\n continue\r\n \r\n # Point in ditch on bottom border of picture\r\n end_point = (-1\r\n * (self.picture.get_size()[1] - 1)\r\n / line[0],\r\n self.picture.get_size()[1] - 1)\r\n\r\n # Draw the ditches\r\n for i in self.get_rows():\r\n draw.line((start_point, end_point), color, width)\r\n\r\n elif i == 'lines':\r\n # Show row line approximations\r\n\r\n # Line attribute settings\r\n width = 1\r\n color = (255, 255, 75)\r\n\r\n # Iterate over all the lines\r\n for line in self.lines:\r\n line = [line[0], line[1]]\r\n\r\n # Point on line on left border of picture\r\n start_point = (0, line[1])\r\n\r\n # Point on line on right border of picture\r\n end_point = (self.picture.get_size()[0] - 1,\r\n line[0]\r\n * (self.picture.get_size()[0] - 1)\r\n + line[1])\r\n\r\n\r\n ## Check if the end point is within the picture\r\n if end_point[1] < 0:\r\n if start_point[1] < 0:\r\n continue\r\n\r\n # Point on line on top border of picture\r\n end_point = (-1 * line[1] / line[0], 0)\r\n \r\n\r\n elif end_point[1] > self.picture.get_size()[1] - 1:\r\n if start_point[1] > self.picture.get_size()[1] - 1:\r\n continue\r\n\r\n # Point on line on bottom border of picture\r\n end_point = (-1\r\n * (self.picture.get_size()[1] - 1)\r\n / line[0],\r\n self.picture.get_size()[1] - 1)\r\n\r\n # Draw the lines\r\n for i in self.get_rows():\r\n draw.line((start_point, end_point), color, width)\r\n\r\n elif i == 'rows':\r\n if self.get_rows():\r\n # Show lines between rows\r\n \r\n width = 3\r\n color = (255,0,0)\r\n\r\n for i in self.get_rows():\r\n draw.line([j.get_center() for j in i], color, width)\r\n else:\r\n print('Rows have not been made for this field')\r\n\r\n elif i == 'numbers':\r\n # Display numbers between plants\r\n\r\n # Find where to put the numbers\r\n midpoints = [(int(round((row[c].get_center()[0]\r\n + row[c + 1].get_center()[0]) / 2)),\r\n int(round((row[c].get_center()[1]\r\n + row[c + 1].get_center()[1]) / 2)))\r\n \r\n for row in self.get_rows()\r\n for c in range(len(row) - 1)]\r\n\r\n # Font specifications\r\n size = 10\r\n 
font = ImageFont.truetype('ariblk.ttf', size)\r\n num = 1\r\n\r\n # Write numbers\r\n for i in midpoints:\r\n draw.text((i[0] - 3 * len(str(round(num, 1))),\r\n i[1]),\r\n str(round(num,1)), font = font)\r\n \r\n num += 1\r\n\r\n elif i == 'tight':\r\n # Display tight boxes\r\n\r\n for i in self.get_tight_boxes():\r\n draw.rectangle(i, outline=(100, 255, 255))\r\n\r\n elif i == 'distances':\r\n # display distances between plants\r\n\r\n # find where to put the distances\r\n midpoints = [(int(round((row[c].get_center()[0]\r\n + row[c + 1].get_center()[0]) / 2)),\r\n int(round((row[c].get_center()[1]\r\n + row[c + 1].get_center()[1]) / 2)))\r\n for row in self.get_rows()\r\n for c in range(len(row) - 1)]\r\n\r\n # Font specifications\r\n size = 10\r\n font = ImageFont.truetype('arial.ttf', size)\r\n num = 1\r\n\r\n # Write numbers\r\n for i in midpoints:\r\n draw.text((i[0] - 3 * len(str(ruler.get_distances()[num])),\r\n i[1]),\r\n str(ruler.get_distances()[num]) + '\"',\r\n font = font)\r\n \r\n num += 1\r\n\r\n # If the user inputs something that isn't an option \r\n else:\r\n raise Exception(i + ' is not a valid option.\\n')\r\n\r\n return img", "def draw_layout(self):\n\n if not self.fg2d_s_short:\n raise ValueError('This template current only works if source wires of fg2d are shorted.')\n\n threshold = self.params['threshold']\n draw_boundaries = self.params['draw_boundaries']\n num_blk = self.params['num_blk']\n show_pins = self.params['show_pins']\n\n row_list = ['ptap', 'nch', 'pch', 'ntap']\n orient_list = ['R0', 'MX', 'R0', 'MX']\n thres_list = [threshold] * 4\n\n # compute number of tracks\n # note: because we're using thick wires, we need to compute space needed to\n # satisfy DRC rules\n hm_layer = self.conn_layer + 1\n num_g_tracks = [0, 0, 0, 0]\n num_gb_tracks = [0, 0, 0, 0]\n num_ds_tracks = [0, 0, 0, 0]\n\n # to draw special stack driver primitive, we need to enable dual_gate options.\n options = dict(dual_gate=True)\n row_kwargs = [{}, options, options, {}]\n if draw_boundaries:\n end_mode = 15\n else:\n end_mode = 0\n\n # specify row types\n self.set_row_types(row_list, orient_list, thres_list, draw_boundaries, end_mode,\n num_g_tracks, num_gb_tracks, num_ds_tracks, guard_ring_nf=0,\n row_kwargs=row_kwargs)\n\n # determine total number of blocks\n # draw nwell tap\n row_idx = 3\n nw_tap = self.add_laygo_primitive('sub', loc=(0, row_idx), nx=num_blk, spx=1)\n\n # draw pmos row\n row_idx = 2\n pmos = self.add_laygo_primitive('dual_stack2s', loc=(0, row_idx), nx=num_blk, spx=1)\n\n # draw nmos row\n row_idx = 1\n nmos = self.add_laygo_primitive('dual_stack2s', loc=(0, row_idx), nx=num_blk, spx=1)\n\n # draw pwell tap\n row_idx = 0\n pw_tap = self.add_laygo_primitive('sub', loc=(0, row_idx), nx=num_blk, spx=1)\n\n # compute overall block size\n self.set_laygo_size(num_col=num_blk + 4)\n self.fill_space()\n # draw boundaries and get guard ring power rail tracks\n self.draw_boundary_cells()", "def generate(self, diagram):", "def draw(self):\n if self.open:\n self.xpos += (200-self.xpos) * 0.1\n else:\n self.xpos += (-self.xpos) * 0.1\n\n # get the display size\n dispw, disph = c_int(), c_int()\n SDL_GetRendererOutputSize(self.rend,dispw,disph)\n\n # don't waste resources drawing the pallet if it isn't onscreen\n if self.xpos > 5:\n #draw the background for the tile pallet\n SDL_SetRenderDrawColor(self.rend,0,0,0,200)\n rect = SDL_Rect()\n rect.x, rect.y, rect.w, rect.h = round(self.xpos-200),0,200,disph.value\n SDL_RenderFillRect(self.rend,rect)\n\n # draw edge line \n 
SDL_SetRenderDrawColor(self.rend,255,255,255,255)\n rect.x, rect.y, rect.w, rect.h = round(self.xpos-1),0,1,disph.value\n SDL_RenderFillRect(self.rend,rect)\n\n # draw tile previews\n for i in range(len(self.itemList.items)+1):\n # highlight selected tile\n if i-1 == self.selected:\n rect.x, rect.y, rect.w, rect.h = round(self.xpos-185),i*150+45-self.scroll,138,138\n SDL_SetRenderDrawColor(self.rend,255,255,255,100)\n SDL_RenderFillRect(self.rend,rect)\n # draw tile preview\n rect.x, rect.y, rect.w, rect.h = round(self.xpos-180),i*150+50-self.scroll,128,128\n if i >= 1:\n for x in self.itemList.items[i-1].find('display'):\n if x.tag == 'rect':\n colors = x.find('color').text[1:-1].split(',')\n SDL_SetRenderDrawColor(self.rend,int(colors[0]),int(colors[1]),int(colors[2]),int(colors[3]) if len(colors) > 3 else 255)\n SDL_RenderFillRect(self.rend,rect)\n #SDL_RenderCopy(self.rend,self.tileSet.getTex(i),None,rect)\n SDL_SetRenderDrawColor(self.rend,255,255,255,255)\n\n # draw the file name for the tile\n quickRenderText(self.rend,self.ft_Mono16,self.itemList.items[i-1].find('name').text.strip(),rect.x,rect.y+128)\n else:\n #SDL_RenderCopy(self.rend,self.tileSet.getTex(i),None,rect)\n SDL_SetRenderDrawColor(self.rend,255,255,255,255)\n\n # draw the file name for the tile\n quickRenderText(self.rend,self.ft_Mono16,\"Edit Only\",rect.x,rect.y+128)", "def __init__( self , wingspan = 1.0 ):\n OGLDrawable.__init__( self ) # ------------------------- Parent class init\n \n # 1. Calc and set geo\n length = wingspan / 2.0\n fuseLen = length / 2.0\n depth = fuseLen / 2.0\n \n fuseHalf = fuseLen / 2.0\n dpthHalf = depth / 2.0\n wingHalf = wingspan / 2.0\n \n front = [ fuseHalf , 0.0 , 0.0 ]\n bottom = [ 0.0 , 0.0 , -dpthHalf ]\n back = [ -fuseHalf , 0.0 , 0.0 ]\n top = [ 0.0 , 0.0 , dpthHalf ]\n rghtWTip = [ -length+fuseHalf , -wingHalf , 0.0 ]\n leftWTip = [ -length+fuseHalf , wingHalf , 0.0 ]\n \n self.set_verts( [ front , bottom , back , top , rghtWTip , leftWTip ] )\n # 0 , 1 , 2 , 3 , 4 , 5\n self.triangles = (\n 3 , 0 , 5 , # Top Front Left\n 2 , 3 , 5 , # Top Back Left\n 0 , 1 , 5 , # Bottom Front Left\n 1 , 2 , 5 , # Bottom Back Left\n \n 0 , 3 , 4 , # Top Front Right\n 3 , 2 , 4 , # Top Back Right\n 1 , 0 , 4 , # Bottom Front Right\n 2 , 1 , 4 , # Bottom Back Right\n )\n \n # 2. 
Set color\n # FIXME", "def mk_dungeon(self, bounding_box, depth=0):\n print \"%s\" % repr(bounding_box)\n edge_buffer = self.edge_min + self.room_min\n room = None\n if ((depth >= self.max_depth)\n or (bounding_box.top + edge_buffer\n > bounding_box.bottom - edge_buffer)\n or (bounding_box.left + edge_buffer\n > bounding_box.right - edge_buffer)):\n room = self.mk_room(bounding_box)\n return room\n\n is_vertical = bool(random.randint(0, 1))\n if is_vertical:\n split = random.randint(bounding_box.left + edge_buffer,\n bounding_box.right - edge_buffer)\n box_1 = Box(bounding_box.top, bounding_box.left,\n bounding_box.bottom, split)\n box_2 = Box(bounding_box.top, split, bounding_box.bottom,\n bounding_box.right)\n else:\n # horizontal split\n split = random.randint(bounding_box.top + edge_buffer,\n bounding_box.bottom - edge_buffer)\n box_1 = Box(bounding_box.top, bounding_box.left, split,\n bounding_box.right)\n box_2 = Box(split, bounding_box.left, bounding_box.bottom,\n bounding_box.right)\n # Room 2 will always be down or right from room 1\n room_1 = self.mk_dungeon(box_1, depth + 1)\n room_2 = self.mk_dungeon(box_2, depth + 1)\n\n # Now we have two \"rooms\" (which may be sub-rooms connected by a\n # corridor), and we need to connect them.\n\n # First see if they share an edge\n\n # print self\n try:\n self.line_connection(room_1, room_2, split, is_vertical)\n except ValueError:\n self.bend_connection(room_1, room_2)\n # print self\n return Box(\n min(room_1.top, room_2.top),\n min(room_1.left, room_2.left),\n max(room_1.bottom, room_2.bottom),\n max(room_1.right, room_2.right)\n )", "def draw(self):\n\t\tpass", "def setup_draw(self):\n pass", "def draw_main_menu():\n draw_cover()\n draw_menu_buttons()\n draw_border()", "def draw_right_zone(self, screen):\n screen.blit(self.right_zone_image, self.right_zone_rect)\n screen.blit(self.right_zone_bottom_image, self.right_zone_bottom_rect)\n screen.blit(self.right_zone_side_image, self.right_zone_side_rect)\n\n if self.selected_tower:\n screen.blit(self.tower_image, self.tower_image_rect)\n screen.blit(self.tower_name, self.tower_name_rect)\n screen.blit(self.tower_level, self.tower_level_rect)\n if self.selected_tower.role == 'attack':\n screen.blit(\n self.tower_attack_image, self.tower_attack_image_rect)\n screen.blit(self.tower_damage, self.tower_damage_rect)\n if self.selected_tower.bonus_damage:\n screen.blit(\n self.tower_bonus_damage, self.tower_bonus_damage_rect)\n screen.blit(\n self.tower_cooldown_image, self.tower_cooldown_image_rect)\n screen.blit(self.tower_cooldown, self.tower_cooldown_rect)\n screen.blit(self.tower_description, self.tower_description_rect)\n elif self.selected_monster:\n screen.blit(self.monster_image, self.monster_image_rect)\n screen.blit(self.monster_name, self.monster_name_rect)\n screen.blit(\n self.monster_health_image, self.monster_health_image_rect)\n screen.blit(self.monster_health, self.monster_health_rect)\n screen.blit(\n self.monster_speed_image, self.monster_speed_image_rect)\n screen.blit(self.monster_speed, self.monster_speed_rect)\n if self.selected_monster_real:\n if self.selected_monster.bonus_speed:\n screen.blit(self.monster_bonus_speed,\n self.monster_bonus_speed_rect)", "def setup_level_2() -> object:\n #create level object\n level = Level()\n\n #create vertical walls for level\n create_and_add_vertical_walls_to_list(4, 19, 4, level.wall_list)\n create_and_add_vertical_walls_to_list(12, 54, 19, level.wall_list)\n create_and_add_vertical_walls_to_list(0, 5, 23, level.wall_list)\n 
create_and_add_vertical_walls_to_list(0, 4, 30, level.wall_list)\n create_and_add_vertical_walls_to_list(55, settings.HEIGHT, 23, level.wall_list)\n create_and_add_vertical_walls_to_list(55, settings.HEIGHT, 30, level.wall_list)\n create_and_add_vertical_walls_to_list(4, 15, 34, level.wall_list)\n create_and_add_vertical_walls_to_list(24, 54, 34, level.wall_list)\n create_and_add_vertical_walls_to_list(29, 45, 47, level.wall_list)\n create_and_add_vertical_walls_to_list(24, 29, 54, level.wall_list)\n create_and_add_vertical_walls_to_list(44, 54, 54, level.wall_list)\n create_and_add_vertical_walls_to_list(14, 55, 73, level.wall_list)\n\n #create horizontal walls for level\n create_and_add_horiontal_walls_to_list(4, 24, 4, level.wall_list)\n create_and_add_horiontal_walls_to_list(30, 34, 4, level.wall_list)\n create_and_add_horiontal_walls_to_list(20, 24, 14, level.wall_list)\n create_and_add_horiontal_walls_to_list(30, 74, 14, level.wall_list)\n create_and_add_horiontal_walls_to_list(4, 19, 19, level.wall_list)\n create_and_add_horiontal_walls_to_list(34, 54, 24, level.wall_list)\n create_and_add_horiontal_walls_to_list(48, 60, 29, level.wall_list)\n create_and_add_horiontal_walls_to_list(68, 74, 29, level.wall_list)\n create_and_add_horiontal_walls_to_list(48, 60, 44, level.wall_list)\n create_and_add_horiontal_walls_to_list(68, 74, 44, level.wall_list)\n create_and_add_horiontal_walls_to_list(54, 73, 54, level.wall_list)\n create_and_add_horiontal_walls_to_list(19, 24, 54, level.wall_list)\n create_and_add_horiontal_walls_to_list(30, 35, 54, level.wall_list) \n\n #create sword item for \"outfit change\" \n create_and_add_item_to_list(\"pics\\sword_item.png\", 0.05, 75, 100, level.item_list)\n\n #create mysterious figure for level\n create_and_add_character_to_list(\"pics\\mystery_figure.png\", 0.095, 270, 350, level.character_list)\n\n #create dialogue for mysterious figure character\n find_disguise_convo = Dialogue(300, 390, 300, 50, \"Someone will notice you!\\n I've hidden something in the servant's quarters,\\n to make you fit in with the nobility.\")\n level.dialogue_list.append(find_disguise_convo)\n\n #info prompts and text for level\n balcony = RoomInfo(640, 500, \"Balcony. Along with the forest and sea, you can see that a battle is coming.\")\n level.room_info_list.append(balcony)\n kitchen = RoomInfo(270, 90, \"Kitchen. There are plentry of servants around. Your torn clothes are eye-catching, and may sabotage your escape\")\n level.room_info_list.append(kitchen)\n great_hall = RoomInfo(270, 470, \"Great hall. You could have sworn that someone recognized you, but nobody acts to capture you.\")\n level.room_info_list.append(great_hall)\n sitting_room = RoomInfo(650, 230, \"Private sitting room. You find several sketches... 
sketches that look like a richer, healthier version of you.\")\n level.room_info_list.append(sitting_room)\n\n return level", "def root_wdgt(self):\n self.summarize()\n modes = ['Global', 'Single-Image']\n\n def logic(mode):\n # cache the widget later\n if mode == modes[0]:\n if self.global_walk is None:\n self.global_walk = self.global_walk_specifier()\n ipy_display(self.global_walk)\n elif mode == modes[1]:\n self.image_view = self.single_image_selector()\n # if self.image_view is None:\n # self.image_view = self.single_image_selector()\n # ipy_display(self.image_view)\n\n UI = interactive(\n logic, mode=widgets.ToggleButtons(options=modes, value=modes[0])\n )\n UI.children[-1].layout.height = '1000px'\n ipy_display(UI)", "def decorate_scene():\n make_polygon( (100,100),(120,140),(270,70) )\n make_polygon( (300,10), (300,550), (340,452),(380,300), (330,50))\n make_polygon( (200,450), (100,450), (100,500), (200,500) )\n make_polygon( (130,320), (150,300), (140,280) )\n return", "def simple_canvas(self):\n self.canvas = Canvas()\n\n self.box1 = Box()\n self.canvas.add(self.box1)\n self.box1.matrix.translate(100, 50)\n self.box1.width = 40 \n self.box1.height = 40 \n self.box1.request_update()\n\n self.box2 = Box()\n self.canvas.add(self.box2)\n self.box2.matrix.translate(100, 150)\n self.box2.width = 50 \n self.box2.height = 50 \n self.box2.request_update()\n\n self.line = Line()\n self.head = self.line.handles()[0]\n self.tail = self.line.handles()[-1]\n self.tail.pos = 100, 100\n self.canvas.add(self.line)\n\n self.canvas.update_now()\n self.view = GtkView()\n self.view.canvas = self.canvas\n from gi.repository import Gtk\n win = Gtk.Window()\n win.add(self.view)\n self.view.show()\n self.view.update()\n win.show()\n\n self.tool = ConnectHandleTool(self.view)", "def view2dToolCtx(*args, alternateContext: bool=True, boxzoom: bool=True, dolly: bool=True,\n exists: bool=True, history: bool=True, image1: Union[AnyStr, bool]=\"\",\n image2: Union[AnyStr, bool]=\"\", image3: Union[AnyStr, bool]=\"\", name:\n AnyStr=\"\", toolName: Union[AnyStr, bool]=\"\", track: bool=True, q=True,\n query=True, e=True, edit=True, **kwargs)->Union[AnyStr, Any]:\n pass", "def draw_draughts():\n global red_draughts\n global white_draughts\n global board_array\n if(red_draughts == []):\n red_draughts = [board.create_oval(0,0,board_divisions,board_divisions,fill=\"red\") for i in xrange(0,15)]\n white_draughts = [board.create_oval(0,0,board_divisions,board_divisions,fill=\"white\")for i in xrange(0,15)]\n #And create event handlers for dragging these\n for draught in red_draughts:\n board.tag_bind(draught, \"<Button-1>\", move_draught_begin)\n board.tag_bind(draught, \"<B1-Motion>\", move_draught)\n board.tag_bind(draught, \"<ButtonRelease-1>\", move_draught_end)\n for draught in white_draughts:\n board.tag_bind(draught, \"<Button-1>\", move_draught_begin)\n board.tag_bind(draught, \"<B1-Motion>\", move_draught)\n board.tag_bind(draught, \"<ButtonRelease-1>\", move_draught_end)\n\n unmoved_red = list(red_draughts)\n unmoved_white = list(white_draughts)\n red_draughts = []\n white_draughts = []\n print board_array\n for i in xrange(1,len(board_array)-1): #Handle Points, ends and bar handled as special cases\n #Calculate where left side of draughts should be, and whether on top or bottom\n if i <= 6:\n left_side = board_divisions*(8+(6-i))\n bottom = True\n elif i <= 12:\n left_side = board_divisions*(1+(12-i))\n bottom = True\n elif i <= 18:\n bottom = False\n left_side = board_divisions*(1+(i-13))\n else: \n 
bottom = False\n left_side = board_divisions*(8+(i-19))\n #Move red draughts to right places\n for j in xrange(board_array[i][0]):\n temp = unmoved_red.pop()\n if(bottom == True):\n board.coords(temp,left_side+board_divisions//10*(j//5),board_divisions*(9-(j%5)),left_side+board_divisions+board_divisions//10*(j//5),board_divisions*(10-(j%5)))\n else:\n board.coords(temp,left_side+board_divisions//10*(j//5),board_divisions*(j%5),left_side+board_divisions+board_divisions//10*(j//5),board_divisions*((j%5)+1))\n red_draughts.append(temp)\n #Now white\n for j in xrange(board_array[i][1]):\n temp = unmoved_white.pop()\n if(bottom == True):\n board.coords(temp,left_side+board_divisions//10*(j//5),board_divisions*(9-(j%5)),left_side+board_divisions+board_divisions//10*(j//5),board_divisions*(10-(j%5)))\n else:\n board.coords(temp,left_side+board_divisions//10*(j//5),board_divisions*(j%5),left_side+board_divisions+board_divisions//10*(j//5),board_divisions*((j%5)+1))\n white_draughts.append(temp)\n #Handle white end, red bar\n #Move red draughts to right places on bar\n for j in xrange(board_array[0][0]):\n temp = unmoved_red.pop()\n board.coords(temp,7*board_divisions+board_divisions//10*(j//4),board_divisions*(9-(j%4)),7*board_divisions+board_divisions+board_divisions//10*(j//4),board_divisions*(10-(j%4)))\n red_draughts.append(temp)\n\n #Now white to places in goal\n for j in xrange(board_array[0][1]):\n temp = unmoved_white.pop()\n board.coords(temp,14*board_divisions+board_divisions//10*(j//4),board_divisions*(9-(j%4)),14*board_divisions+board_divisions+board_divisions//10*(j//4),board_divisions*(10-(j%4)))\n white_draughts.append(temp)\n #Handle red end, white\n #Move white draughts to right places on bar\n\n for j in xrange(board_array[25][1]):\n temp = unmoved_white.pop()\n board.coords(temp,7*board_divisions+board_divisions//10*(j//4),board_divisions*(j%4),7*board_divisions+board_divisions+board_divisions//10*(j//4),board_divisions*((j%4)+1))\n white_draughts.append(temp)\n\n #Now red to places in goal\n for j in xrange(board_array[25][0]):\n temp = unmoved_red.pop()\n board.coords(temp,14*board_divisions,board_divisions*j,15*board_divisions,board_divisions*(j+1))\n board.coords(temp,14*board_divisions+board_divisions//10*(j//4),board_divisions*(j%4),14*board_divisions+board_divisions+board_divisions//10*(j//4),board_divisions*((j%4)+1))\n red_draughts.append(temp)\n if(board_array[25][0] == 15):\n print \"You win!\"", "def __handle_view_door(self, gamestate_component):", "def create_screen(self, width, height):", "def draw(self, context):\n Left_Depth = self.product.get_prompt(\"Left Depth\")\n Right_Depth = self.product.get_prompt(\"Right Depth\")\n Shelf_Quantity = self.product.get_prompt(\"Shelf Quantity\")\n Add_Backing = self.product.get_prompt(\"Add Backing\")\n Backing_Thickness = self.product.get_prompt(\"Backing Thickness\")\n Add_Top = self.product.get_prompt(\"Add Top KD\")\n Remove_Left_Side = self.product.get_prompt(\"Remove Left Side\")\n Remove_Right_Side = self.product.get_prompt(\"Remove Right Side\")\n Hide_Toe_Kick = self.product.get_prompt(\"Hide Toe Kick\")\n Door = self.product.get_prompt(\"Door\")\n Use_Left_Swing = self.product.get_prompt(\"Use Left Swing\")\n Pull_Location = self.product.get_prompt(\"Pull Location\")\n Force_Double_Doors = self.product.get_prompt(\"Force Double Doors\")\n Door_Pull_Height = self.product.get_prompt(\"Door Pull Height\")\n Door_Type = self.product.get_prompt(\"Door Type\")\n Angled_Shelves = self.product.get_prompt(\"Angled 
Shelves\")\n Open_Door = self.product.get_prompt(\"Open Door\")\n Base_Pull_Location = self.product.get_prompt(\"Base Pull Location\")\n Tall_Pull_Location = self.product.get_prompt(\"Tall Pull Location\")\n Upper_Pull_Location = self.product.get_prompt(\"Upper Pull Location\")\n \n layout = self.layout\n self.draw_product_size(layout) \n \n if Left_Depth:\n box = layout.box()\n row = box.row()\n Left_Depth.draw_prompt(row)\n \n if Right_Depth:\n Right_Depth.draw_prompt(row)\n \n if Shelf_Quantity:\n col = box.column(align=True)\n row = col.row()\n row.label(\"Qty:\")\n row.prop(self,\"shelf_quantity\",expand=True) \n col.separator()\n \n if Add_Backing:\n row = box.row()\n Add_Backing.draw_prompt(row)\n\n #if Backing_Thickness:\n # if Add_Backing.value() == True:\n # row = box.row()\n # Backing_Thickness.draw_prompt(row)\n \n \n if Add_Top:\n row = box.row()\n Add_Top.draw_prompt(row) \n\n if Remove_Left_Side:\n row = box.row()\n Remove_Left_Side.draw_prompt(row) \n \n if Remove_Right_Side:\n row = box.row()\n Remove_Right_Side.draw_prompt(row) \n \n \n # row = box.row()\n # Hide_Toe_Kick.draw_prompt(row)\n \n row = box.row()\n Door.draw_prompt(row)\n if Door.value():\n if Angled_Shelves and Door_Type:\n if Angled_Shelves.value() == False:\n row = box.row()\n row.prop(self,'Door_Type',text=\"Door Type\")\n #Door_Type.draw_prompt(row)\n row = box.row()\n #Pull_Location.draw_prompt(row)\n row.prop(self,'Pull_Location',text=\"Pull Location\")\n\n row = box.row()\n #Door_Pull_Height.draw_prompt(row)\n row.prop(self,'Pull_Type',text=\"Pull Type\")\n row = box.row()\n if self.Pull_Type == 'Base':\n Base_Pull_Location.draw_prompt(row)\n elif self.Pull_Type == 'Tall':\n Tall_Pull_Location.draw_prompt(row)\n else:\n Upper_Pull_Location.draw_prompt(row)\n\n if Open_Door: \n row = box.row()\n Open_Door.draw_prompt(row)\n\n if Angled_Shelves:\n if Angled_Shelves.value():\n row = box.row()\n Use_Left_Swing.draw_prompt(row)\n row = box.row() \n Force_Double_Doors.draw_prompt(row)", "def draw(self, context):\n Left_Depth = self.product.get_prompt(\"Left Depth\")\n Right_Depth = self.product.get_prompt(\"Right Depth\")\n Shelf_Quantity = self.product.get_prompt(\"Shelf Quantity\")\n Add_Backing = self.product.get_prompt(\"Add Backing\")\n Backing_Thickness = self.product.get_prompt(\"Backing Thickness\")\n Add_Top = self.product.get_prompt(\"Add Top KD\")\n Remove_Left_Side = self.product.get_prompt(\"Remove Left Side\")\n Remove_Right_Side = self.product.get_prompt(\"Remove Right Side\")\n Hide_Toe_Kick = self.product.get_prompt(\"Hide Toe Kick\")\n Door = self.product.get_prompt(\"Door\")\n Use_Left_Swing = self.product.get_prompt(\"Use Left Swing\")\n Pull_Location = self.product.get_prompt(\"Pull Location\")\n Force_Double_Doors = self.product.get_prompt(\"Force Double Doors\")\n Door_Pull_Height = self.product.get_prompt(\"Door Pull Height\")\n Door_Type = self.product.get_prompt(\"Door Type\")\n Angled_Shelves = self.product.get_prompt(\"Angled Shelves\")\n Open_Door = self.product.get_prompt(\"Open Door\")\n Base_Pull_Location = self.product.get_prompt(\"Base Pull Location\")\n Tall_Pull_Location = self.product.get_prompt(\"Tall Pull Location\")\n Upper_Pull_Location = self.product.get_prompt(\"Upper Pull Location\")\n \n layout = self.layout\n self.draw_product_size(layout) \n \n if Left_Depth:\n box = layout.box()\n row = box.row()\n Left_Depth.draw_prompt(row)\n \n if Right_Depth:\n Right_Depth.draw_prompt(row)\n \n if Shelf_Quantity:\n col = box.column(align=True)\n row = col.row()\n 
row.label(\"Qty:\")\n row.prop(self,\"shelf_quantity\",expand=True) \n col.separator()\n \n if Add_Backing:\n row = box.row()\n Add_Backing.draw_prompt(row)\n\n #if Backing_Thickness:\n # if Add_Backing.value() == True:\n # row = box.row()\n # Backing_Thickness.draw_prompt(row)\n \n \n if Add_Top:\n row = box.row()\n Add_Top.draw_prompt(row) \n\n if Remove_Left_Side:\n row = box.row()\n Remove_Left_Side.draw_prompt(row) \n \n if Remove_Right_Side:\n row = box.row()\n Remove_Right_Side.draw_prompt(row) \n \n \n # row = box.row()\n # Hide_Toe_Kick.draw_prompt(row)\n \n row = box.row()\n Door.draw_prompt(row)\n if Door.value():\n if Angled_Shelves and Door_Type:\n if Angled_Shelves.value() == False:\n row = box.row()\n row.prop(self,'Door_Type',text=\"Door Type\")\n #Door_Type.draw_prompt(row)\n row = box.row()\n #Pull_Location.draw_prompt(row)\n row.prop(self,'Pull_Location',text=\"Pull Location\")\n\n row = box.row()\n #Door_Pull_Height.draw_prompt(row)\n row.prop(self,'Pull_Type',text=\"Pull Type\")\n row = box.row()\n if self.Pull_Type == 'Base':\n Base_Pull_Location.draw_prompt(row)\n elif self.Pull_Type == 'Tall':\n Tall_Pull_Location.draw_prompt(row)\n else:\n Upper_Pull_Location.draw_prompt(row)\n\n if Open_Door: \n row = box.row()\n Open_Door.draw_prompt(row)\n\n if Angled_Shelves:\n if Angled_Shelves.value():\n row = box.row()\n Use_Left_Swing.draw_prompt(row)\n row = box.row() \n Force_Double_Doors.draw_prompt(row)", "def draw(self, win):\n img = self.tower_imgs\n win.blit(img, (self.x - img.get_width() // 2, self.y - img.get_height() // 2))\n\n if self.selected:\n self.menu.draw(win)", "def draw_house_walls(x, y, width, height):\n print('Типа рисую стены...', x, y, width, height)", "def draw(self) -> Any:", "def part2(mem):\n panels = paint_panels(mem, 1)\n min_x, min_y, max_x, max_y = tuple(\n f(k[i] for k in panels.keys()) for f in (min, max) for i in (0, 1)\n )\n drawing = [[\" \"] * (max_x - min_x + 1) for _ in range(min_y, max_y + 1)]\n for (x, y), col in panels.items():\n if col:\n drawing[y][x] = \"\\u2588\"\n return \"\\n\".join(f\" {''.join(line)}\" for line in drawing)", "def door(doorX, doorY, occurrency):\n\tdef door0(dx, dy, dz):\n\n\t\tmodel = []\n\n\t\tfor xIndex in range(len(doorX)):\n\t\t\tyQuotes = []\n\t\t\txSum = sum(doorX[:xIndex])\n\t\t\tfor yIndex in range(len(doorY)):\n\t\t\t\tif(occurrency[xIndex][yIndex] == False):\n\t\t\t\t\tyQuotes.append(-doorY[yIndex])\n\t\t\t\telse:\n\t\t\t\t\tyQuotes.append(doorY[yIndex])\n\t\t\tmodel.append(PROD([ QUOTE([-xSum, doorX[xIndex]]), QUOTE(yQuotes)]))\n\n\t\tres = PROD([STRUCT(model), Q(dy)])\n\t\tres = MAP([S2,S3,S1])(res)\n\t\tres = S([1,2,3])([dx/SIZE([1])(res)[0], dy/SIZE([2])(res)[0], dz/SIZE([3])(res)[0]]) (res)\n\n\t\tdoor = TEXTURE([\"wood.jpg\", True, False, 1, 1, 0, 1, 1])(STRUCT([res]))\n\n\t\tglass = CUBOID([SIZE([1])(res)[0]*0.94, 0.01, SIZE([3])(res)[0]*0.94])\n\t\tglass = T([1,2,3])([dx*0.003, dy/2, dz*0.005])(glass)\n\t\tglass = TEXTURE([\"glass.jpg\"])(glass)\n\n\t\trefiner = CUBOID([0.03, 0.01,dz])\n\t\trefiner = T([1,2])([dx/2,dy])(refiner)\n\t\trefiner = TEXTURE([\"wood.jpg\", True, False, 1, 1, 0, 1, 1])(refiner)\n\n\t\thandler1 = T(3)(.15)(CUBOID([.05,.02,.2]))\n\t\thandler2 = CUBOID([.05,.02,.05])\n\t\thandler3 = T([1,2])([.01,.02])(CUBOID([.03,.02,.2]))\n\t\thandler = TEXTURE(\"bronze.jpg\")(STRUCT([handler3, handler2, handler1]))\n\t\thandler = T([1,2,3])([dx/2.-2*SIZE([1])(handler)[0],dy, dz/2.-1.5*SIZE([3])(handler)[0]])(handler)\n\n\t\tfinalDoor = S([1,2,3])([dx/SIZE([1])(res)[0], 
dy/SIZE([2])(res)[0], dz/SIZE([3])(res)[0]]) (STRUCT([door, glass, refiner, handler]))\n\n\t\treturn finalDoor\n\n\treturn door0", "def draw_tree(self):\r\n color = self.food_color if 0 != self.current_food else self.no_food_color\r\n random.seed()\r\n for i in range(self.fruit_number):\r\n x = random.randint(0, self.surface_width - self.fruit_size)\r\n y = random.randint(0, self.surface_height - self.fruit_size)\r\n self.fruit_list.append((x,y))\r\n pygame.draw.rect(self.surface, color, (x, y, self.fruit_size, self.fruit_size))", "def build_dl(self):\n level = self.level\n self.cells = {} # {(u,v):dl_offset}\n if self.ndl:\n glDeleteLists(self.dlbase, self.ndl)\n # first display list is a blank cell\n self.celltypes = {' ':CellType(0,True)}\n for i,ct in enumerate(sorted(level.celltypes)):\n self.celltypes[ct] = CellType(i+1,ct in \" SXO\")\n numtypes = len(level.celltypes)\n for coords,cellcode in level.hexes.items():\n ct = self.celltypes[cellcode]\n self.cells[coords] = ct.n\n self.ndl = numtypes\n self.dlbase = glGenLists(self.ndl)\n # Compile display lists\n for k,ct in self.celltypes.items():\n with gl_compile(self.dlbase + ct.n):\n # space or player start\n if k[0] in \" S\":\n glLineWidth(1 if k[0] == \" \" else 3)\n glColor4f(*cellcolour(k))\n with gl_begin(GL_LINE_LOOP):\n for x,y in hexcorners:\n glVertex2f(x,y)\n elif k[0] == \"H\": # hexagon tile\n glColor4f(*cellcolour(k))\n glCallList(TILE_OBJECTS[\"H\"].mesh_dls[\"hex\"])\n elif k in [\"Au\",\"Ag\",\"Cu\",\"Pt\"]:\n glColor4f(1.0,1.0,1.0,1.0)\n glCallList(TILE_OBJECTS[k].mesh_dls[\"hex\"])\n elif k[0] in \"#^v<>OL\": #wall\n glColor4f(*cellcolour(k))\n glCallList(TILE_OBJECTS[k[0]].mesh_dls[\"hex\"])\n elif k[0] == \"X\": # exit\n glColor4f(*cellcolour(k))\n glLineWidth(3)\n with gl_begin(GL_LINES): # STUB\n for x,y in [\n (-0.5,-0.5), (0.5,0.5),\n (-0.5,0.5), (0.5,-0.5)]:\n glVertex2f(x,y)\n elif k[0] == \"P\": #powerup square\n with gl_begin(GL_TRIANGLE_FAN):\n glColor4f(1.0,1.0,0.0,1.0)\n glVertex2f(0,0)\n glColor4f(0.5,0.0,0.0,0.1)\n for x,y in hexcorners:\n glVertex2f(x,y)\n glVertex2f(*hexcorners[0])\n\n self.all_dl = glGenLists(1)", "def draw(self):\n pass", "def draw(self):\n pass", "def draw(self):\n pass", "def draw(self):\n pass", "def draw_menu(self, context):\n if context.engine == 'RPR':\n layout = self.layout\n layout.popover('RPR_VIEW3D_PT_panel')", "def show(self,canvas): \n for piece in self.bluh:\n piece.render(canvas)\n\n #create vertical and horizontal bold outline\n for i in range(len(self.board)+1):\n x0=300+self.piecesize*i\n y0=100\n x1=300+self.piecesize*i\n y1=900\n canvas.create_line(x0,y0,x1,y1,width=5,fill=self.mode.color1)\n for a in range(len(self.board)+1):\n for i in range(len(self.board)+1):\n x2=300\n y2=100+self.piecesize*i\n x3=1100\n y3=100+self.piecesize*i\n canvas.create_line(x2,y2,x3,y3,width=5,fill=self.mode.color1)\n for piece in self.bluh:\n if piece.isselected==True:\n piece.dropShadow(canvas)\n piece.render(canvas)\n #print(piece.__repr__())", "def draw_flower_advanced():\n draw_flower()\n turtle.left(90)\n turtle.up()\n turtle.forward(150)\n turtle.left(90)\n turtle.forward(150)\n turtle.right(90)\n turtle.down()", "def draw_list(self, side):\n g = cf.gs.game.events\n if side == 'left':\n self.scene.remove_child(self.left_list)\n x = PygameUI.List(\n g.show_inactive_events(), (200, 224, 200),\n self.click_event_list1\n )\n x.frame = pygame.Rect(4, 4+30, 150, Menu.scene.frame.h -8-30)\n #~ x.frame.w = x.container.frame.w\n x.border_width = 1\n x.container.draggable = True\n 
self.scene.add_child(x)\n self.left_list = x\n elif side == 'right':\n self.scene.remove_child(self.right_list)\n x = PygameUI.List(\n g.show_active_events(), (200, 224, 200),\n self.click_event_list2\n )\n x.frame = pygame.Rect(Menu.scene.frame.w -154, 4+30, 150, Menu.scene.frame.h -8-30)\n #~ x.frame.w = x.container.frame.w\n x.border_width = 1\n x.container.draggable = True\n self.scene.add_child(x)\n self.right_list = x", "def create_scene():\n create_floor()\n if config.M != \"\":\n if config.LEVEL == 1:\n create_wall()\n create_enemy()\n create_gap()\n create_platform()\n create_marijuana()\n create_star()\n create_fish()\n elif config.LEVEL == 2:\n create_boss()\n create_platform()\n create_star()", "def drawme(self, diag, pos, flip, tags=tuple()):\n p = pos.copy()\n\n # intag is attached to items that should be considered\n # inside the box\n intag = self.tags[0] + 'inside'\n\n # draw the bindings\n for binding in self.bindings:\n # check whether the key was already drawn\n drawn = binding.key.isdrawn()\n\n # draw the binding\n binding.draw(diag, p, flip, tags=tags)\n\n # apply intag to the dots \n self.canvas.addtag_withtag(intag, binding.dot.tags)\n if drawn:\n # if the key was already drawn, then the binding\n # contains two dots, so we should add intag to the\n # second one.\n if binding.dot2:\n self.canvas.addtag_withtag(intag, binding.dot2.tags)\n else:\n # if the key wasn't drawn yet, it should be\n # considered inside this mapping\n self.canvas.addtag_withtag(intag, binding.key.tags)\n\n # move down to the position for the next binding\n p.y = binding.bbox().bottom + 1.8\n\n if len(self.bindings):\n # if there are any bindings, draw a box around them\n bbox = self.canvas.bbox(intag)\n item = self.canvas.box(bbox, tags=tags, **self.boxoptions)\n else:\n # otherwise just draw a box\n bbox = BBox([p.copy(), p.copy()])\n item = self.canvas.box(bbox, padx=0.4, pady=0.4, tags=tags,\n **self.boxoptions)\n\n # make the box clickable\n self.bind(item)\n self.boxitem = item\n\n # put the label above the box\n if self.label:\n p = bbox.upperleft()\n item = self.canvas.str(p, self.label, anchor=SW,\n font=smallfont, tags=tags)\n # make the label clickable\n self.bind(item)\n\n # if the whole mapping is not in the right position, shift it.\n if flip == 1:\n dx = pos.x - self.bbox().left\n else:\n dx = pos.x - self.bbox().right\n\n self.canvas.move(self.tags, dx, 0, transform=True)", "def build_wall(self, type, pos1, pos2, thickness=1):\n raise NotImplementedError", "def Draw_Tree( self, rooted_tree, menuoptions = 0, editor = 0 ):\r\n #Clear the previous information\r\n self.Reset_Selection()\r\n self.canvas_one.delete( ALL )\r\n self.canvas_two.delete( ALL )\r\n self.handle_list = []\r\n \r\n if editor:\r\n self.Adjust_Menu( menuoptions )\r\n #if no node\r\n if( rooted_tree == 0 ):\r\n self.canvas_one.create_text( cb.xorigin, 5, text=\"There is no tree to display\", anchor = NW )\r\n ys = 0\r\n #one node\r\n elif( rooted_tree.sub == [] ):\r\n #if there is only one node, make its length one because a zero length will not show up\r\n store = rooted_tree.data.length\r\n rooted_tree.data.length = 1\r\n xlong = rooted_tree.Longest_Branch( )\r\n cb.New_XLong( xlong )\r\n ys = self.Draw_Node( rooted_tree, cb.xorigin, cb.yorigin)\r\n rooted_tree.data.length = store\r\n else:\r\n #recursively draw the tree, temporarily store the root's length and make it zero\r\n #If the root is long(Isolated), it does not squish the rest of the data\r\n store = rooted_tree.data.length\r\n 
rooted_tree.data.length = 0\r\n #Get the longest distance from root to leaf\r\n xlong = rooted_tree.Longest_Branch( )\r\n cb.New_XLong( xlong ) #Change the scale\r\n ys, ypos1 = self.Rec_Draw_Tree( rooted_tree, cb.xorigin, cb.yorigin )\r\n #Extend the root node so that it is visible\r\n ls = self.Find_Line_By_Node( rooted_tree )\r\n self.canvas_one.coords( ls.line_handle, cb.xorigin-5, ypos1, cb.xorigin, ypos1 )\r\n rooted_tree.data.length = store #restore the root node's length\r\n ys = ys + cb.ytick\r\n self.canvas_one.create_text(20,ys,text=\"_____\")\r\n self.canvas_two.create_text(20,ys,text=\"_____\") #end markers\r\n #Set the scrollregions of the canvases\r\n ys = ys + cb.ytick\r\n self.ys = ys + 0*cb.ytick\r\n self.canvas_one.config( scrollregion = ( 0, 0, 300, self.ys ) )\r\n self.canvas_two.config( scrollregion = ( 0, 0, 300, self.ys ) )\r\n self.Draw_Scale()", "def draw_house_foundation(x, y, width, height):\n print('Типа рисую фундамент...', x, y, width, height)", "def draw_room(screen, grid, start_location):\n wall_image = pygame.image.load(\"images/pillar.png\")\n wall_image_transparent = pygame.image.load(\"images/pillar_80.png\")\n floor_image = pygame.image.load(\"images/floor.png\")\n computer_image = pygame.image.load(\"images/desk_computer.png\")\n\n # map_to_image = [floor_image, # 0\n # wall_image, # 1\n # wall_image_transparent, # 2\n # computer_image] # 3\n map_to_image = {\n \"0\": floor_image,\n \"1\": wall_image,\n \"2\": wall_image_transparent,\n \"3\": computer_image,\n \"10\": wall_image # Secret passage\n }\n # better tile management for multiple environments / create multiple environments.\n # 0 = floor, 1 = wall (pillar)\n # First draw floor everywhere\n max_dimensions = grid.shape\n for r in range(max_dimensions[0]):\n for c in range(max_dimensions[1]):\n screen.blit(floor_image, (c * 30 + start_location[0],\n r * 30 + start_location[1]))\n\n for tile_type in [1, 2, 3, 10]:\n the_rows, the_cols = np.where(grid == tile_type)\n for i in range(len(the_cols)):\n screen.blit(map_to_image[str(tile_type)], (the_cols[i] * 30 + start_location[0],\n the_rows[i] * 30 + start_location[1]))", "def create_custom_graph(radius, thickness):\n\n global screen\n generating = True\n\n # Number of vertices created and the two vertices to connect\n vertices_created = 0\n vtx_one = None\n vtx_two = None\n\n while generating:\n\n for event in pygame.event.get():\n\n # Get all mouse click events\n if event.type == pygame.MOUSEBUTTONDOWN:\n\n # Store the click position's coordinates\n mouse_x, mouse_y = pygame.mouse.get_pos()\n\n # Get all keys pressed\n keys = pygame.key.get_pressed()\n\n # Create a vertex when clicking and pressing 'v'\n if keys[pygame.K_v]:\n vtx = {\"ID\": vertices_created,\n \"x\": mouse_x,\n \"y\": mouse_y,\n \"color\": \"WHITE\",\n \"adjacent\": [],\n }\n VERTICES.append(vtx);\n vertices_created += 1\n\n # Set the source vertex to whichever vertex was clicked on\n for vtx in VERTICES:\n if (is_clicked(vtx[\"x\"], vtx[\"y\"], mouse_x, mouse_y, RADIUS)):\n vtx_one = vtx\n \n if event.type == pygame.MOUSEBUTTONUP:\n\n # Store the click position's coordinates\n mouse_x, mouse_y = pygame.mouse.get_pos()\n\n # Set the destination vertex to whichever vertex was under the\n # cursor after the click\n for vtx in VERTICES:\n if (is_clicked(vtx[\"x\"], vtx[\"y\"], mouse_x, mouse_y, RADIUS)):\n vtx_two = vtx\n\n # If the source and destination vertices have values, connect them\n if vtx_one is not None and vtx_two is not None and vtx_one[\"ID\"] is not 
vtx_two[\"ID\"]:\n vtx_one[\"adjacent\"].append(vtx_two[\"ID\"])\n vtx_two[\"adjacent\"].append(vtx_one[\"ID\"])\n \n\n\n if event.type == pygame.KEYDOWN:\n\n # Reset the graph generation if 'r' is pressed\n if event.key == pygame.K_r:\n vertices_created = 0\n VERTICES.clear()\n vtx_one = None\n vtx_two = None\n screen.fill(BACKGROUND)\n\n # Delete the most recently made vertex and all of its adjacencies\n if event.key == pygame.K_u and vertices_created >= 1:\n vertices_created -= 1\n deleted = VERTICES.pop()\n for adj in deleted[\"adjacent\"]:\n VERTICES[adj][\"adjacent\"].remove(deleted[\"ID\"])\n vtx_one = None\n vtx_two = None\n screen.fill(BACKGROUND)\n \n # Delete the most recently drawn edge\n if event.key == pygame.K_e and vertices_created >= 2:\n if vtx_one[\"adjacent\"] and vtx_two[\"adjacent\"]:\n vtx_one[\"adjacent\"].pop()\n vtx_two[\"adjacent\"].pop()\n screen.fill(BACKGROUND)\n\n\n\n # Close window on pressing ESC\n if event.key == pygame.K_ESCAPE or event.key == pygame.K_c:\n generating = False\n\n # If the window is closed, exit the game\n if event.type == pygame.QUIT:\n generating = False\n \n draw_graph(VERTICES, RADIUS, THICKNESS)\n pygame.display.update()", "def draw_card(dealer,player):\n # hidden_img = Image(img_path+\"back.png\")\n depth = 100\n x0,y0 = 100,100\n x1,y1 = 100,300\n ix = 30\n\n bj_board.clear()\n for card in dealer:\n if card.state:\n card.image.moveTo(x0, y0)\n card.image.setDepth(depth)\n bj_board.add(card.image)\n else:\n img = Image(img_path+\"Back.png\")\n img.moveTo(x0, y0)\n img.setDepth(depth)\n bj_board.add(img)\n x0 += ix\n \n for card in player:\n if card.state:\n card.image.moveTo(x1, y1)\n card.image.setDepth(depth)\n bj_board.add(card.image)\n else:\n img = Image(img_path+\"back.png\")\n img.moveTo(x1, y1)\n img.setDepth(depth)\n bj_board.add(img)\n x1 += ix", "def figura(self):\n\n dx=10.6\n dy=11.5\n ###################\n #primero a dibujar#\n ###################\n glBegin(GL_POLYGON)\n glVertex2f(1.0,2*dy)\n glVertex2f(dx,5*dy/2)\n glVertex2f(1.0,45.0)\n glVertex2f(3*dx/2,3*dy)\n glVertex2f(5*dx/2,45.0)\n glVertex2f(7*dx/2,3*dy)\n glVertex2f(52.0,45.0)\n glVertex2f(4*dx,5*dy/2)\n glVertex2f(52.0,2*dy)\n glVertex2f(4*dx,3*dy/2)\n glVertex2f(52.0,1.0)\n glVertex2f(7*dx/2,dy)\n glVertex2f(5*dx/2,1.0)\n glVertex2f(3*dx/2,dy)\n glVertex2f(1.0,1.0)\n glVertex2f(dx,3*dy/2)\n glEnd()\n glColor3f(138.0/255,138.0/255,138.0/255)\n glBegin(GL_TRIANGLES)\n glVertex2f(1.0,1.0)\n glVertex2f(1.0,2*dy)\n glVertex2f(2*dx/3,3*dy/2)\n glEnd()\n glBegin(GL_TRIANGLES)\n glVertex2f(1.0,2*dy)\n glVertex2f(2*dx/3,5*dy/2)\n glVertex2f(1.0,45.0)\n glEnd()\n ###################\n #segundo a dibujar#\n ###################\n glColor3f(249.0/255,135.0/255,21.0/255)\n glBegin(GL_TRIANGLES)\n glVertex2f(3*dx/2,2*dy)\n glVertex2f(3*dx/2,3*dy)\n glVertex2f(3*dx/2+dx,3*dy)\n glEnd()\n glBegin(GL_TRIANGLES)\n glVertex2f(3*dx/2+dx,3*dy)\n glVertex2f(7*dx/2,3*dy)\n glVertex2f(7*dx/2,2*dy)\n glEnd()\n glBegin(GL_TRIANGLES)\n glVertex2f(7*dx/2,2*dy)\n glVertex2f(7*dx/2,dy)\n glVertex2f(5*dx/2,dy)\n glEnd()\n glBegin(GL_TRIANGLES)\n glVertex2f(5*dx/2,dy)\n glVertex2f(3*dx/2,dy)\n glVertex2f(3*dx/2,2*dy)\n glEnd()\n #segundos triangulos naranjos\n glBegin(GL_TRIANGLES)\n glVertex2f(3*dx/2,5*dy/2)\n glVertex2f(3*dx/2,3*dy/2)\n glVertex2f(dx,2*dy)\n glEnd()\n glBegin(GL_TRIANGLES)\n glVertex2f(2*dx,3*dy)\n glVertex2f(5*dx/2,7*dy/2)\n glVertex2f(3*dx,3*dy)\n glEnd()\n glBegin(GL_TRIANGLES)\n glVertex2f(7*dx/2,5*dy/2)\n glVertex2f(4*dx,2*dy)\n glVertex2f(7*dx/2,3*dy/2)\n glEnd()\n 
glBegin(GL_TRIANGLES)\n glVertex2f(3*dx,dy)\n glVertex2f(5*dx/2,dy/2)\n glVertex2f(2*dx,dy)\n glEnd()\n ##################\n #ultimo a dibujar#\n ##################\n xini=3*dx/2\n yini=2*dy\n glColor3f(0.8,0.0,0.0)\n glBegin(GL_QUADS)\n glVertex2f(xini,yini)\n glVertex2f(xini+dx,yini+dy)\n glVertex2f(xini+2*dx,yini)\n glVertex2f(xini+dx,yini-dy)\n glEnd()", "def default_door():\n X = [0.0, 0.14, 1.12, 1.26]\n Y = [0.0, 0.14, 2.24]\n Z = [-0.14, 0.14]\n V, F = True, False\n occupancy = [\n [[V], [V]],\n [[V], [F]],\n [[V], [V]]\n ]\n return w7.window(X, Y, Z, occupancy)", "def render_outline_2d(self, **kwds):\n wireframe = [];\n for l in self.lines:\n l_coords = self.coordinates_of(l)\n wireframe.append( line2d(l_coords, **kwds) )\n for a in self.arrows:\n a_coords = self.coordinates_of(a)\n wireframe.append( arrow(a_coords[0], a_coords[1], **kwds) )\n return sum(wireframe)", "def setup(x, y, w, h, t):\n filldraw_rectangle(x,y,w,h,t,\"red\")\n \n for y in range(10):\n if (y % 2) != 0:\n pos = (10*y)+10\n else:\n pos =((10*y)-9)+10 \n for x in range(10):\n filldraw_rectangle(x,y,1,1,t,\"red\")\n if pos in snake1 or pos in snake2: \n filldraw_rectangle(x,y,1,1,t,\"green\")\n if pos in ladder1 or pos in ladder2: \n filldraw_rectangle(x,y,1,1,t,\"yellow\")\n write_num(x,y,t,pos) \n if (y % 2) != 0:\n pos = pos - 1\n else:\n pos = pos + 1", "def draw_building():\n\n gerardo.penup()\n gerardo.backward(135)\n gerardo.pendown()\n gerardo.begin_fill()\n for i in range(2): # this loop draws out the rectangle for the building\n gerardo.forward(200)\n gerardo.right(90)\n gerardo.forward(100)\n gerardo.right(90)\n gerardo.end_fill()\n gerardo.hideturtle()", "def __init__(self):\r\n Frame.__init__(self)\r\n self.master.title(\"GUIs drawing geometric shapes\")\r\n self.grid()\r\n\r\n #create a canvas and place in this frame\r\n self.canvas = Canvas(self, width = 300, height = 400)\r\n self.canvas.grid(row = 0, column = 0)\r\n\r\n self.canvas.create_rectangle(100, 50, 200, 350)\r\n self.canvas.create_oval(100, 50, 200, 150,\r\n fill = \"white\", tags = \"RED\")\r\n self.canvas.create_oval(100, 150, 200, 250,\r\n fill = \"white\", tags = \"YELLOW\")\r\n self.canvas.create_oval(100, 250, 200, 350,\r\n fill = \"green\", tags = \"GREEN\")\r\n\r\n \r\n dx = 1\r\n while True:\r\n self.canvas.after(2000) # Sleep for 15 milliseconds\r\n self.canvas.update() # Update canvas\r\n if dx == 1:\r\n self.canvas.itemconfigure(\"YELLOW\", fill = \"yellow\")\r\n self.canvas.itemconfigure(\"GREEN\", fill = \"white\")\r\n dx += 1\r\n elif dx == 2:\r\n self.canvas.itemconfigure(\"RED\", fill = \"red\")\r\n self.canvas.itemconfigure(\"YELLOW\", fill = \"white\")\r\n dx += 1 \r\n else:\r\n self.canvas.itemconfigure(\"RED\", fill = \"white\")\r\n self.canvas.itemconfigure(\"GREEN\", fill = \"green\")\r\n dx = 1", "def draw(self, base, level):\n\n a = base.a\n b = base.b\n\n if level > 0:\n delta = base.b - base.a\n px = a.x + delta.x / 3\n py = a.y + delta.y / 3\n rx = a.x + 2 * delta.x / 3\n ry = a.y + 2 * delta.y / 3\n p = Point(px, py)\n r = Point(rx, ry)\n q = Point(rx, ry)\n q.rotate_deg(60, p)\n self.draw(Line(a,p), level-1)\n self.draw(Line(p,q), level-1)\n self.draw(Line(q,r), level-1)\n self.draw(Line(r,b), level-1)\n else:\n self.container.window.create_line(a.x, a.y, b.x, b.y)", "def show(self):\n data = []\n for row in self.grid:\n mid, bottom = [], []\n for node in row:\n \tmid += [0, int(node.right)]\n \tbottom += [int(node.down), 1]\n data += mid + [0] + bottom + [0] \n data[self.width*2+1] = 1\n data[-1] = 1\n 
data += (self.width*2) * [0]\n im = Image.new('1', (self.width*2+1, self.height*2+1))\n im.putdata(data)\n im.save('maze.png')\n im.show()", "def draw(self):\n i = 0\n self.window.fill((60,50,20))\n for i in range(len(self.board)):\n for j in range(len(self.board[i])):\n pygame.draw.rect(self.window, ((i+j)%2*255, (i+j)%2*255, (i+j)%2*255), (20+j*100, 20+i*100, 100, 100))\n if self.board[i][j] != 0:\n if self.board[i][j].player == 0:\n color = (200, 0, 0)\n else:\n color = (0, 0, 200)\n if self.board[i][j].direction == 0:\n pygame.draw.ellipse(self.window, color, (30+j*100, 40+i*100, 80, 60))\n elif self.board[i][j].direction == 1:\n pygame.draw.ellipse(self.window, color, (40+j*100, 30+i*100, 60, 80))\n if self.board[i][j].master:\n if self.board[i][j].direction == 0:\n pygame.draw.ellipse(self.window, (255,255,0), (40+j*100, 50+i*100, 60, 40))\n pygame.draw.ellipse(self.window, color, (45+j*100, 55+i*100, 50, 30))\n elif self.board[i][j].direction == 1:\n pygame.draw.ellipse(self.window, (255,255,0), (50+j*100, 40+i*100, 40, 60))\n pygame.draw.ellipse(self.window, color, (55+j*100, 45+i*100, 30, 50))\n \n if self.selected != None:\n pygame.draw.rect(self.window, (200, 200, 0), (20+self.selected[1]*100, 20+self.selected[0]*100, 100, 100), 5)\n pygame.display.flip()", "def __init__(self, parent, tile_dir=None, start_level=None,\n min_level=None, max_level=None, **kwargs):\n\n # create and initialise the base panel\n wx.Panel.__init__(self, parent=parent, id=wx.ID_ANY, **kwargs)\n self.SetBackgroundColour(pySlip.BackgroundColour)\n\n # get tile info\n self.tiles = pySlip.Tiles(tile_dir)\n self.max_level = max_level\n if max_level is None:\n self.max_level = self.tiles.max_level\n self.min_level = min_level\n if min_level is None:\n self.min_level = self.tiles.min_level\n self.level = start_level\n if start_level is None:\n self.level = self.min_level\n\n self.tile_size_x = self.tiles.tile_size_x\n self.tile_size_y = self.tiles.tile_size_y\n\n # set some internal state\n self.view_width = None # view size in pixels\n self.view_height = None # set on onResize()\n\n self.ppd_x = 0 # pixel_per_degree for current tileset\n self.ppd_y = 0\n\n self.view_offset_x = 0 # pixel offset at left & top of view\n self.view_offset_y = 0\n\n self.view_llon = self.view_rlon = None # view limits\n self.view_tlat = self.view_blat = None\n\n self.was_dragging = False # True if dragging map\n self.move_dx = 0 # drag delta values\n self.move_dy = 0\n self.last_drag_x = None # previous drag position\n self.last_drag_y = None\n\n self.ignore_next_up = False # flag to ignore next UP event\n\n self.is_box_select = False # True if box selection\n self.sbox_1_x = self.sbox_1_y = None # box size\n\n # layer stuff\n self.next_layer_id = 1 # source of unique layer IDs\n self.layer_z_order = [] # layer Z order, contains layer IDs\n self.layer_mapping = {} # maps layer ID to (...layer data...)\n\n # callback to report mouse position in view\n self.mouse_position_callback = None\n\n # callback on right mouse click (right button up event)\n self.rightclick_callback = None\n\n # callback on level change\n self.change_level_callback = None\n\n # bind events\n self.Bind(wx.EVT_SIZE, self.onResize) # widget events\n self.Bind(wx.EVT_PAINT, self.onPaint)\n\n self.Bind(wx.EVT_MOTION, self.onMove) # mouse events\n self.Bind(wx.EVT_LEFT_DOWN, self.onLeftDown)\n self.Bind(wx.EVT_LEFT_DCLICK, self.onLeftDClick)\n self.Bind(wx.EVT_LEFT_UP, self.onLeftUp)\n self.Bind(wx.EVT_RIGHT_DOWN, self.onRightDown)\n self.Bind(wx.EVT_RIGHT_UP, 
self.onRightUp)\n self.Bind(wx.EVT_MIDDLE_DOWN, self.onMiddleDown)\n self.Bind(wx.EVT_MIDDLE_UP, self.onMiddleUp)\n self.Bind(wx.EVT_MOUSEWHEEL, self.onMouseWheel)\n\n # OK, use the tile level the user wants\n self.use_level(self.level)\n\n # force a resize, which sets up the rest of the state\n self.onResize()", "def draw( self, screen, game_self):\r\n\r\n if self.is_visible == False: return\r\n \r\n window.Window.draw(self, screen)\r\n\r\n screen.blit( self.top_font, ((self.centerx-self.top_font.get_width()/2), 60))\r\n \r\n\r\n if self.menu == self.SWORD:\r\n pygame.draw.rect(screen, COLOR_GLAY, Rect(204, 95, 232, 30), 0)\r\n if self.menu == self.KATANA:\r\n pygame.draw.rect(screen, COLOR_GLAY, Rect(204, 125, 232, 30), 0)\r\n if self.menu == self.BLUNT:\r\n pygame.draw.rect(screen, COLOR_GLAY, Rect(204, 155, 232, 30), 0)\r\n if self.menu == self.GUN:\r\n pygame.draw.rect(screen, COLOR_GLAY, Rect(204, 185, 232, 30), 0)\r\n if self.menu == self.THROW: \r\n pygame.draw.rect(screen, COLOR_GLAY, Rect(204, 215, 232, 30), 0)\r\n if self.menu == self.SHIELD:\r\n pygame.draw.rect(screen, COLOR_GLAY, Rect(204, 245, 232, 30), 0)\r\n if self.menu == self.ARMOR:\r\n pygame.draw.rect(screen, COLOR_GLAY, Rect(204, 275, 232, 30), 0)\r\n if self.menu == self.HELMET:\r\n pygame.draw.rect(screen, COLOR_GLAY, Rect(204, 305, 232, 30), 0)\r\n if self.menu == self.GAUNTLET:\r\n pygame.draw.rect(screen, COLOR_GLAY, Rect(204, 335, 232, 30), 0)\r\n if self.menu == self.ACCESSORY:\r\n pygame.draw.rect(screen, COLOR_GLAY, Rect(204, 365, 232, 30), 0)\r\n if self.menu == self.ITEM:\r\n pygame.draw.rect(screen, COLOR_GLAY, Rect(204, 395, 232, 30), 0)\r\n\r\n\r\n screen.blit(self.sword_font, ((self.centerx-self.sword_font.get_width()/2), 100))\r\n screen.blit(self.katana_font, ((self.centerx-self.katana_font.get_width()/2), 130))\r\n screen.blit(self.blunt_font, ((self.centerx-self.blunt_font.get_width()/2), 160))\r\n screen.blit(self.gun_font, ((self.centerx-self.gun_font.get_width()/2), 190))\r\n screen.blit(self.throw_font, ((self.centerx-self.throw_font.get_width()/2), 220))\r\n screen.blit(self.shield_font, ((self.centerx-self.shield_font.get_width()/2), 250))\r\n screen.blit(self.armor_font, ((self.centerx-self.armor_font.get_width()/2), 280))\r\n screen.blit(self.helmet_font, ((self.centerx-self.helmet_font.get_width()/2), 310))\r\n screen.blit(self.gauntlet_font, ((self.centerx-self.gauntlet_font.get_width()/2), 340))\r\n screen.blit(self.accessory_font, ((self.centerx-self.accessory_font.get_width()/2), 370))\r\n screen.blit(self.item_font, ((self.centerx-self.item_font.get_width()/2), 400))\r\n\r\n\r\n #draw extra window\r\n self.buy_window.draw(screen, game_self)", "def draw( self, **kw ):\n pass", "def draw(self):\n spacing = 50\n # # Pygame Setup # #\n # calculate how wide and tall it needs to be\n width = (self.num_hidden_layers + 3) * spacing * 2\n values = [self.num_input_nodes, self.num_hidden_nodes, self.num_output_nodes]\n values.sort(reverse=True)\n height = (values[0] + 1) * spacing\n pygame.init()\n screen = pygame.display.set_mode([width, height])\n pygame.display.set_caption(\"Genetic Path Finding\") # name of the window created\n clock = pygame.time.Clock() # used to manage how fast the screen updates\n myfont = pygame.font.Font(None, 12) # sets the font for text in pygame\n drawing = True\n while drawing:\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n pygame.quit()\n return\n screen.fill((255, 255, 255))\n\n h_percentile = height - spacing\n w_percentile = (width - 
(spacing * 2)) / (self.num_hidden_layers + 2)\n # Nodes\n for node in range(self.num_input_nodes):\n pos = h_percentile / (self.num_input_nodes + 1)\n gg.draw_circle(screen, (105, 105, 105), self.node_pos(spacing, 'input', 1, node), 5, aa=True)\n for layer in range(self.num_hidden_layers):\n for node in range(self.num_hidden_nodes):\n pos = h_percentile / (self.num_hidden_nodes + 1)\n bias = self.biases[layer][node]\n color = gg.color_gradient(math.tanh(bias))\n gg.draw_circle(screen, color, self.node_pos(spacing, 'hidden', layer, node), 5, aa=True)\n for node in range(self.num_output_nodes):\n pos = h_percentile / (self.num_output_nodes + 1)\n bias = self.biases[-1][node]\n color = gg.color_gradient(math.tanh(bias))\n gg.draw_circle(screen, color, self.node_pos(spacing, 'output', 1, node), 5, aa=True)\n\n # Connections\n for inp in range(self.num_input_nodes):\n for node in range(self.num_hidden_nodes):\n weight = self.weights[0][node][inp]\n color = gg.color_gradient(math.tanh(weight))\n pygame.draw.aaline(screen, color, self.node_pos(spacing, 'input', 1, inp),\n self.node_pos(spacing, 'hidden', 0, node))\n for layer in range(0, self.num_hidden_layers - 1):\n for node in range(self.num_hidden_nodes):\n for other in range(self.num_hidden_nodes):\n weight = self.weights[layer + 1][other][node]\n color = gg.color_gradient(math.tanh(weight))\n pygame.draw.aaline(screen, color, self.node_pos(spacing, 'hidden', layer, node),\n self.node_pos(spacing, 'hidden', layer + 1, other))\n for node in range(self.num_hidden_nodes):\n for out in range(self.num_output_nodes):\n layer = self.num_hidden_layers\n weight = self.weights[layer][out][node]\n color = gg.color_gradient(math.tanh(weight))\n pygame.draw.aaline(screen, color, self.node_pos(spacing, 'hidden', layer - 1, node),\n self.node_pos(spacing, 'output', 1, out))\n\n pygame.display.flip()\n clock.tick(20)", "def level_creator(self, screen: pygame.Surface) -> None:\n self.name = input()\n f = open(normpath(self.name), 'w')\n work = True\n while work == True:\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n work = False\n if event.type == pygame.MOUSEBUTTONDOWN:\n self.x1, self.y1 = event.pos\n self.checker[0] = True\n if event.type == pygame.MOUSEBUTTONUP:\n self.x2, self.y2 = event.pos\n self.checker[1] = True\n if event.type == pygame.KEYDOWN and chr(event.key) == 'o':\n self.level_write(f)\n work = False\n if self.checker[0] == self.checker[1] == True:\n if self.x1 > self.x2 and self.y1 > self.y2:\n self.y1, self.y2, self.x1, self.x2 = self.y2, self.y1, self.x2, self.x1\n elif self.x1 > self.x2 and self.y1 < self.y2:\n self.x1, self.x2 = self.x2, self.x1\n elif self.x1 < self.x2 and self.y1 > self.y2:\n self.y1, self.y2 = self.y2, self.y1\n self.objects.append(pygame.Rect(self.x1, self.y1, abs(\n self.x2-self.x1), abs(self.y2-self.y1)))\n self.checker[0], self.checker[1] = False, False\n self.draw(screen)", "def __init__(self, width, height, background=None, mode=\"RGBA\", ppi=300):\n # unless specified, interpret width and height as pixels\n width = units.parse_dist(width, default_unit=\"px\", ppi=ppi)\n height = units.parse_dist(height, default_unit=\"px\", ppi=ppi)\n width,height = int(round(width)),int(round(height))\n # create image\n self.img = PIL.Image.new(mode, (width, height), background)\n # create drawer\n self.drawer = aggdraw.Draw(self.img)\n # remember info\n self.background = background\n self.ppi = ppi\n # by default, interpret all sizes in % of width\n self.default_unit = \"%w\"\n # by default, interpret 
all coordinates in pixel space\n self.pixel_space()", "def draw_flower_bed():\n turtle.up()\n turtle.left(180)\n turtle.forward(200)\n turtle.right(180)\n turtle.down()\n for x in range(3):\n draw_flower_advanced()", "def draw_spiders(self, spiders, graph, positions, draw_box_labels=True):", "def __init__(self, mapfile, xpos, zpos, emap, width=10.0, depth=10.0, height=10.0, name=\"building\", draw_details=None, yoff=0.0, scheme=None):\r\n self.xpos = xpos\r\n self.zpos = zpos\r\n self.width = width\r\n self.depth = depth\r\n self.height = height\r\n self.name = name\r\n self.ceilingthickness = 1.0\r\n self.walls = []\r\n\r\n if scheme == None:\r\n self.scheme = Building.baseScheme\r\n else:\r\n self.scheme = scheme\r\n\r\n # We don't have to be rigorous here, this should only be a draw_details or an iterable of draw_details.\r\n if hasattr(draw_details, \"__getitem__\") or hasattr(draw_details, \"__iter__\"):\r\n assert (len(draw_details) == self.scheme[\"#models\"])\r\n self.details = draw_details\r\n else:\r\n self.details = [draw_details for x in range(self.scheme[\"#models\"])]\r\n # having a method like this allows draw details to be set later\r\n\r\n self.yoff = yoff\r\n\r\n self.model = [MergeShape(name=name+\".\"+str(x)) for x in range(self.scheme[\"#models\"])]\r\n\r\n if mapfile[0] != '/':\r\n mapfile = sys.path[0] + '/' + mapfile\r\n print(\"Loading building map ...\", mapfile)\r\n\r\n im = Image.open(mapfile)\r\n im = ImageOps.invert(im)\r\n ix,iy = im.size\r\n\r\n print(\"image size\", ix, \",\", iy)\r\n\r\n startx = xpos - ix / 2 * width\r\n starty = zpos - ix / 2 * depth\r\n\r\n yoff += emap.calcHeight(-xpos,-zpos)\r\n\r\n if not im.mode == \"P\":\r\n im = im.convert('P', palette=Image.ADAPTIVE)\r\n im = im.transpose(Image.FLIP_TOP_BOTTOM)\r\n im = im.transpose(Image.FLIP_LEFT_RIGHT)\r\n pixels = im.load()\r\n\r\n for y in range(1,iy-1):\r\n print(\".\", end='')\r\n for x in range(1,ix-1):\r\n colour = pixels[x,y]\r\n\r\n if x == 1:\r\n self._executeScheme(x, y, startx, starty, (colour, pixels[x-1,y], \"edge\"), wallfunc=self.west_wall, ceilingedgefunc=self.west_edge, ceilingfunc=self.ceiling, rooffunc=self.roof)\r\n else:\r\n self._executeScheme(x, y, startx, starty, (colour, pixels[x-1,y]), wallfunc=self.west_wall, ceilingedgefunc=self.west_edge, ceilingfunc=self.ceiling, rooffunc=self.roof)\r\n\r\n if x == ix-2:\r\n self._executeScheme(x, y, startx, starty, (colour, pixels[x+1,y], \"edge\"), wallfunc=self.east_wall, ceilingedgefunc=self.east_edge, ceilingfunc=self.ceiling, rooffunc=self.roof)\r\n else:\r\n self._executeScheme(x, y, startx, starty, (colour, pixels[x+1,y]), wallfunc=self.east_wall, ceilingedgefunc=self.east_edge, ceilingfunc=self.ceiling, rooffunc=self.roof)\r\n\r\n if y == 1:\r\n self._executeScheme(x, y, startx, starty, (colour, pixels[x,y-1], \"edge\"), wallfunc=self.south_wall, ceilingedgefunc=self.south_edge, ceilingfunc=self.ceiling, rooffunc=self.roof)\r\n else:\r\n self._executeScheme(x, y, startx, starty, (colour, pixels[x,y-1]), wallfunc=self.south_wall, ceilingedgefunc=self.south_edge, ceilingfunc=self.ceiling, rooffunc=self.roof)\r\n\r\n if y == iy-2:\r\n self._executeScheme(x, y, startx, starty, (colour, pixels[x, y+1], \"edge\"), wallfunc=self.north_wall, ceilingedgefunc=self.north_edge, ceilingfunc=self.ceiling, rooffunc=self.roof)\r\n else:\r\n self._executeScheme(x, y, startx, starty, (colour, pixels[x,y+1]), wallfunc=self.north_wall, ceilingedgefunc=self.north_edge, ceilingfunc=self.ceiling, rooffunc=self.roof)\r\n\r\n 
self._executeScheme(x, y, startx, starty, (colour, None), wallfunc=None, ceilingedgefunc=None, ceilingfunc=self.ceiling, rooffunc=self.roof)\r\n\r\n self.set_draw_details(self.details) # after models created otherwise\r\n # details lost by merging\r", "def setup_level_1() -> object:\n #create level object\n level = Level()\n\n #create vertical walls for level\n create_and_add_vertical_walls_to_list(4, 39, 4, level.wall_list)\n create_and_add_vertical_walls_to_list(4, 25, 19, level.wall_list)\n create_and_add_vertical_walls_to_list(33, 54, 19, level.wall_list)\n create_and_add_vertical_walls_to_list(4, 25, 34, level.wall_list)\n create_and_add_vertical_walls_to_list(33, 54, 34, level.wall_list)\n create_and_add_vertical_walls_to_list(14, 25, 54, level.wall_list)\n create_and_add_vertical_walls_to_list(33, 44, 54, level.wall_list)\n create_and_add_vertical_walls_to_list(14, 45, 74, level.wall_list)\n create_and_add_vertical_walls_to_list(54, settings.HEIGHT, 23, level.wall_list)\n create_and_add_vertical_walls_to_list(54, settings.HEIGHT, 30, level.wall_list)\n\n #create horizontal walls for level\n create_and_add_horiontal_walls_to_list(4, 34, 4, level.wall_list)\n create_and_add_horiontal_walls_to_list(4, 9, 19, level.wall_list)\n create_and_add_horiontal_walls_to_list(15, 24, 19, level.wall_list)\n create_and_add_horiontal_walls_to_list(30, 54, 19, level.wall_list)\n create_and_add_horiontal_walls_to_list(54, 74, 14, level.wall_list)\n create_and_add_horiontal_walls_to_list(4, 24, 39, level.wall_list)\n create_and_add_horiontal_walls_to_list(30, 54, 39, level.wall_list)\n create_and_add_horiontal_walls_to_list(54, 74, 44, level.wall_list)\n create_and_add_horiontal_walls_to_list(19, 24, 54, level.wall_list)\n create_and_add_horiontal_walls_to_list(30, 35, 54, level.wall_list)\n\n #create knight character for level\n create_and_add_character_to_list(\"pics\\prison_guard.png\", 0.2, 270, 470, level.character_list)\n\n #knight asks for bribe\n guard_convo = Dialogue(300, 500, 150, 50, \"I know who you are...\\n if you pay me,\\n I'll turn a blind eye.\")\n level.dialogue_list.append(guard_convo)\n\n #create coin item to bribe knight character\n create_and_add_item_to_list(\"pics\\gold_1.png\", 0.5, 400, 250, level.item_list)\n\n #create prompts and info for rooms for object\n cell = RoomInfo(120, 100, \"Dungeon cell. There's a note and key. Someone's waiting for you in the garden.\")\n level.room_info_list.append(cell)\n guard_room = RoomInfo(450, 280, \"Guardroom. There's the unconconsious bodies of the guards. Your saviours must've gone to great lengths...\")\n level.room_info_list.append(guard_room)\n torture_chamber = RoomInfo(120, 280, \"Torture chamber. You've been here before. They were questioning you, but you didn't answer.\")\n level.room_info_list.append(torture_chamber)\n battle_room = RoomInfo(650, 280, \"Battle room. You see that your captors are fighting revolutionaries- those who seek to bring back a lost king.\")\n level.room_info_list.append(battle_room)\n stairwell = RoomInfo(220, 520, \"Stairwell. 
There's a lone guard who doesn't look surprised to see you\")\n level.room_info_list.append(stairwell)\n\n return level", "def draw_flower_advanced():\n draw_flower()\n turtle.left(90)\n turtle.up() #Raise pen for movement\n turtle.forward(150)\n turtle.left(90)\n turtle.forward(150)\n turtle.right(90)\n turtle.down() #lower pen for drawing", "def draw(self, win, player, displayList, enemyHitboxList, mapWidth, mapHeight):\n cameraX = player.rect.left + (player.rect.width // 2) - (SCREEN_WIDTH // 2)\n cameraY = player.rect.top + (player.rect.height // 2) - (SCREEN_HEIGHT // 2)\n\n # On centre la camera tant que le joueurs n'atteind pas les bords\n if cameraX >= 0 and cameraX < mapWidth - SCREEN_WIDTH:\n self.x = cameraX\n\n if cameraY >= 0 and cameraY < mapHeight - SCREEN_HEIGHT:\n self.y = cameraY\n\n # Calcul de l'X du joueur en fonction s'il est en haut, bas ou entre les 2\n if cameraX >= 0 and cameraX < mapWidth - SCREEN_WIDTH:\n playerX = (SCREEN_WIDTH // 2) - (player.rect.width // 2)\n else:\n # Si le joueur est a droite\"\"\"\n if cameraX >= mapWidth - SCREEN_WIDTH:\n self.x = mapWidth - SCREEN_WIDTH\n playerX = player.rect.left - mapWidth + SCREEN_WIDTH\n # Si le joueur est a gauche\"\"\"\n else:\n self.x = 0\n playerX = player.rect.left\n\n\n # Calcul de l'Y du joueur en fonction s'il est a gauche, droite ou entre les 2\n if cameraY >= 0 and cameraY < mapHeight - SCREEN_HEIGHT:\n playerY = (SCREEN_HEIGHT // 2) - (player.rect.height // 2)\n else:\n # Si le joueur est en dessous\n if cameraY >= mapHeight - SCREEN_HEIGHT:\n self.y = mapHeight - SCREEN_HEIGHT\n playerY = player.rect.top - mapHeight + SCREEN_HEIGHT\n # Si le joueur est au dessus \n else:\n self.y = 0\n playerY = player.rect.top\n\n for element in displayList:\n element.draw(win,element.rect.left - self.x,element.rect.top - self.y)\n #for elem in enemyHitboxList:\n #pg.draw.rect(win, (200, 200, 200), pg.Rect(elem.left - self.x,elem.top - self.y, elem.width, elem.height))\n player.draw(win, playerX, playerY)", "def OnCustomRender(self, dc, item, rect):\r\n \r\n pass", "def draw_box(stdscr, y, x, height, width, mode=0):\n if mode == 0:\n stdscr.addstr(y, x, \"┌\" + \"─\" * (width - 1) + \"┐\")\n stdscr.addstr(y + height, x, \"└\" + \"─\" * (width - 1) + \"┘\")\n for i in range(y + 1, y + height):\n stdscr.addstr(i, x, \"│\")\n stdscr.addstr(i, x + width, \"│\")\n if mode == 1:\n stdscr.addstr(y, x, \"╭\" + \"─\" * (width - 1) + \"╮\")\n stdscr.addstr(y + height, x, \"╰\" + \"─\" * (width - 1) + \"╯\")\n for i in range(y + 1, y + height):\n stdscr.addstr(i, x, \"│\")\n stdscr.addstr(i, x + width, \"│\")\n if mode == 2:\n stdscr.addstr(y, x, \"╔\" + \"═\" * (width - 1) + \"╗\")\n stdscr.addstr(y + height, x, \"╚\" + \"═\" * (width - 1) + \"╝\")\n for i in range(y + 1, y + height):\n stdscr.addstr(i, x, \"║\")\n stdscr.addstr(i, x + width, \"║\")", "def __init__(self, _pendown=1, gridmode=False, gridsize=50, homeX = 50 + 25 + 5, homeY = 50 + 25 + 5, canvWidth = 400, canvHeight = 200, \\\n turtleMainColor=\"#00A651\", turtleAccentColor=\"#FFF600\", speed = 5, rotspeed = 5, pencolor = 'red', penwidth=3):\n self._turtleMainColor = turtleMainColor\n self._turtleAccentColor = turtleAccentColor\n self._speed = speed\n self._rotspeed = rotspeed\n self._pendown = _pendown\n self._pencolor = pencolor\n self._penwidth = penwidth\n self._rotation = 90\n self._gridsize = gridsize\n self._gridmode = gridmode\n \n if(gridmode and homeX == 80):\n homeX = 0\n homeY = 0\n \n self._x = homeX\n self._y = homeY\n self._homeX = homeX\n 
self._homeY = homeY\n \n self._canvWidth = canvWidth\n self._canvHeight = canvHeight\n self._actions = []\n self._levelDataString = [] \n \n self._walls = []\n self._lava = []\n \n self._appendCurrentState();", "def draw_monster(generikmon):\r\n turtle.clear()\r\n mirror = 1\r\n drawhalfmonster(mirror, generikmon)\r\n mirror = -1\r\n drawhalfmonster(mirror, generikmon) #draws second half of monster\r\n return()", "def draw(self, DISPLAYSURF):\n\n pygame.draw.rect(DISPLAYSURF, GREY, self.menuRect)\n\n for item in self._menu_items:\n if item['menu_type'] == 'Button':\n pygame.draw.rect(DISPLAYSURF, item['colour'], item['rect'])\n DISPLAYSURF.blit(item['text'], (item['rect'].left+7, item['rect'].top+4))\n # highlights\n if item['highlight'] == ACTIVE and item['persistence'] == True:\n if item['colour'] != WHITE:\n pygame.draw.rect(DISPLAYSURF, BLACK, item['rect'], 3)\n else:\n pygame.draw.rect(DISPLAYSURF, BLUE, item['rect'], 3)\n elif item['menu_type'] == 'Slider':\n pygame.draw.rect(DISPLAYSURF, item['colour'], item['rect'])\n DISPLAYSURF.blit(item['text'][0], (item['xpos']+10, item['ypos']-40))\n DISPLAYSURF.blit(item['text'][1], (item['xpos']-5, item['ypos']+15))\n DISPLAYSURF.blit(item['text'][2], (item['xpos']+item['width']-15, item['ypos']+15))\n pygame.draw.line(DISPLAYSURF, item['colour'], (item['xpos'], item['ypos']),\n (item['xpos']+item['width'], item['ypos']))\n for button in self._menu_items:\n if button['menu_type'] == 'Button':\n if button['persistence'] == False:\n if button['rect'].collidepoint(pygame.mouse.get_pos()[0],\n pygame.mouse.get_pos()[1]):\n pygame.draw.rect(DISPLAYSURF, BLUE, button['rect'], 3)", "def create_menu():\n MenuData = [\n (\"&Draw Variables\",drawable.ask),\n (\"&Show Variables\",printall),\n (\"&Print Variables\",printval),\n (\"&Edit Variable\",edit),\n (\"&Rename Variable\",rename),\n (\"&Forget Variables\",forget),\n (\"---\",None),\n (\"&Create Plane\",\n [(\"Coordinates\", \n [(\"Point and normal\", createPlaneCoordsPointNormal),\n (\"Three points\", createPlaneCoords3Points),\n ]), \n (\"Visually\", \n [(\"Three points\", createPlaneVisual3Points),\n ]),\n ]),\n (\"&Select Plane\",planes.ask),\n (\"&Draw Selection\",planes.draw),\n (\"&Forget Selection\",planes.forget),\n (\"---\",None),\n (\"&Pick Actors\",pick_actors),\n (\"&Pick Elements\",pick_elements),\n (\"&Pick Points\",pick_points),\n (\"&Pick Edges\",pick_edges),\n (\"---\",None),\n ('&Selection',\n [('&Create Report',report_selection),\n ('&Set Property',setprop_selection),\n ('&Grow',grow_selection),\n ('&Partition',partition_selection),\n ('&Get Partition',get_partition),\n ('&Export',export_selection),\n ]),\n (\"---\",None),\n ('&Query',\n [('&Actors',query_actors),\n ('&Elements',query_elements),\n ('&Points',query_points),\n ('&Edges',query_edges),\n ('&Distances',query_distances),\n ]),\n (\"---\",None),\n (\"&Close\",close_menu),\n ]\n return widgets.Menu('Tools',items=MenuData,parent=GD.gui.menu,before='help')", "def draw_glycan_in_canvas(self, canvas, tree, root, names, h = 100., w = 100.):\n fig = mpl.figure.Figure(figsize=(h/self.dpi, w/self.dpi))\n ax = fig.add_subplot(111)\n \n self.myDrawer.draw_tree(tree, root, names, root_pos = [0, 0], direction = 1, ax = ax, axis = 0)\n ax.axis('equal')\n ax.axis('off')\n ax.set_ylim((-1, 6))\n ax.set_xlim((-3, 3))\n\n # Add to tk window\n figure_canvas_agg = FigureCanvasAgg(fig)\n figure_canvas_agg.draw()\n figure_x, figure_y, figure_w, figure_h = fig.bbox.bounds\n figure_w, figure_h = int(figure_w), int(figure_h)\n glycan_image = 
tk.PhotoImage(master = canvas, width=figure_w, height=figure_h)\n canvas.create_image(figure_w/2, figure_h/2, image = glycan_image)\n tkagg.blit(glycan_image, figure_canvas_agg.get_renderer()._renderer, colormode=2)\n return glycan_image", "def drawBoard(self):\r\n self.outer.draw(self.surface)\r\n self.background.draw(self.surface)\r\n for point in self.points:\r\n point.draw(self.surface)\r\n point.drawCheckers(self.surface)\r\n self.dice.draw(self.surface)\r\n self.message.draw(self.surface)\r\n self.checkerBox.draw(self.surface)\r\n self.checkerBox.drawCheckers(self.surface)\r\n for bar in self.bar:\r\n bar.draw(self.surface)\r\n bar.drawCheckers(self.surface)\r\n pygame.display.flip()", "def draw_card(dealer,player): \n depth = 100\n x0,y0 = 100,100\n x1,y1 = 100,300\n\n bj_board.clear()\n for i in range(len(dealer)):\n if dealer[i].state==True:\n bj_board.add(dealer[i].image)\n dealer[i].image.moveTo(x0+i*20,y0)\n dealer[i].image.setDepth(depth-10*i)\n elif dealer[i].state==False:\n img=Image(img_path+\"Back.png\")\n bj_board.add(img)\n img.moveTo(x0+i*20,y0)\n img.setDepth(depth-10*i)\n for i in range(len(player)):\n bj_board.add(player[i].image)\n player[i].image.moveTo(x1+i*20,y1)\n player[i].image.setDepth(depth-10*i) \n \n text=Text(\"Your Total: \" + str(hand_value(player)))\n text.moveTo(300,300)\n bj_board.add(text)\n \n if dealer[0].state==True:\n text=Text(\"Dealer Total: \" + str(hand_value(dealer)))\n text.moveTo(300,100)\n bj_board.add(text)", "def example_BSR():\n pts = [(1,1),(2,2),(3,3)]\n lines = [ [ (1,1), (1,2), (2,1)], [ (6,1), (1,6), (5,-1)] ]\n\n bloody_simple_2drender('2d_render.png', pts=pts, vecs=pts, lines=lines )", "def create_diamonds(wof_settings,screen,diamonds,levelMap):\n \n diamond_width = wof_settings.element_width\n diamond_height = wof_settings.element_height\n \n # Place the diamonds to the field\n for diamond_position in levelMap['diamond']:\n diamond = Diamond(screen)\n diamond.x = diamond_position[1] * diamond_width\n diamond.y = diamond_position[0] * diamond_height\n diamond.rect.x = diamond.x\n diamond.rect.y = diamond.y\n diamonds.add(diamond)", "def __init__(self):\n self.opening_scene = DungeonGate()\n # this list define the order of scenes in the corridor\n self.corridor_scenes = [GuardsRoom(), Cell(), Armory(), EmptyRoom(), Dormitory()]\n shuffle(self.corridor_scenes)\n self.explored_scenes = {\n \"GuardsRoom\": \"unexplored\",\n \"Cell\": \"unexplored\",\n \"Dormitory\": \"unexplored\",\n \"Armory\": \"unexplored\",\n \"EmptyRoom\": \"unexplored\",\n \"DungeonGate\": \"unexplored\"\n }", "def draw_field_of_view(gameDisplay, hero):\n #Fill with black\n gameDisplay.fill(white)\n \n #Draw ground\n \n for x in range(visible_squares[0]):\n #From left to right <-> 0 to 16\n #\n for y in range(visible_squares[1]):\n g_x, g_y = l2g(x, y, hero)\n p_x, p_y = l2p(x, y)\n \n if not hero.Map.world[g_x][g_y]:\n \"\"\"This should only be the case if the player\n is within sight of the edge of earth\"\"\"\n pygame.draw.rect(gameDisplay, \n black, \n [p_x, p_y,square_size[0], square_size[1]]\n )\n \n elif hero.Map.world[g_x][g_y] == 1:\n pygame.draw.rect(gameDisplay, green, \n [p_x, p_y,square_size[0], square_size[1]])\n\n elif hero.Map.world[g_x][g_y] == 2:\n pygame.draw.rect(gameDisplay, bech, \n [p_x, p_y, square_size[0], square_size[1]])\n \n elif hero.Map.world[g_x][g_y] == 3:\n pygame.draw.rect(gameDisplay, blue, \n [p_x, p_y, square_size[0], square_size[1]])\n \n \n #pygame.draw.rect(gameDisplay, red, [px, py, square_size, square_size]) \n\n 
#draw_hero\n hero_px, hero_py = l2p(8, 8)\n \n pygame.draw.rect(gameDisplay, red, [hero_px, hero_py, \n square_size[0], square_size[1]])\n #gameDisplay.draw", "def render(self, mode = 'human'):\n if mode == 'human':\n\n if self.is_2d:\n fig = plt.figure()\n for index, value in np.ndenumerate(self.board):\n if value == 1:\n plt.scatter(*index, c='red', s=1000, alpha=0.2)\n elif value == -1:\n plt.scatter(*index, c='blue', s=1000, alpha=0.2)\n plt.xlim(-1, self.dim[0])\n plt.ylim(-1, self.dim[1])\n plt.xticks([])\n plt.yticks([])\n plt.grid(True)\n\n if self.is_3d:\n fig = plt.figure()\n ax = Axes3D(fig)\n for index, value in np.ndenumerate(self.board):\n if value == 1:\n ax.scatter(*index, c='red', s=1000, alpha=0.2)\n elif value == -1:\n ax.scatter(*index, c='blue', s=1000, alpha=0.2)\n ax.set_xlim(0, self.dim[0] - 1)\n ax.set_ylim(0, self.dim[1] - 1)\n ax.set_zlim(0, self.dim[2] - 1)\n\n else:\n fig = plt.figure()\n ax = Axes3D(fig)\n for index, value in np.ndenumerate(self.board):\n if value == 1:\n ax.scatter(*index, c='red', s=1000, alpha=0.2)\n elif value == -1:\n ax.scatter(*index, c='blue', s=1000, alpha=0.2)\n\n ax.set_xlim(0, self.dim[0] - 1)\n ax.set_ylim(0, self.dim[1] - 1)\n ax.set_zlim(0, self.dim[2] - 1)\n ax.set_title('Nr of steps: ' + str(self.steps))\n\n plt.show()\n return fig", "def PaintLevel(self, item, dc, level, y, align):\r\n\r\n x = level*self._indent\r\n\r\n left_image_list = 0\r\n if self._imageListLeft:\r\n left_image_list += self._imageListLeft.GetBitmap(0).GetWidth()\r\n \r\n x += left_image_list\r\n \r\n if not self.HasAGWFlag(TR_HIDE_ROOT):\r\n \r\n x += self._indent\r\n \r\n elif level == 0:\r\n \r\n # always expand hidden root\r\n origY = y\r\n children = item.GetChildren()\r\n count = len(children)\r\n \r\n if count > 0:\r\n n = 0\r\n while n < count:\r\n oldY = y\r\n y = self.PaintLevel(children[n], dc, 1, y, align)\r\n n = n + 1\r\n\r\n if not self.HasAGWFlag(TR_NO_LINES) and self.HasAGWFlag(TR_LINES_AT_ROOT) and count > 0:\r\n \r\n # draw line down to last child\r\n origY += self.GetLineHeight(children[0])>>1\r\n oldY += self.GetLineHeight(children[n-1])>>1\r\n oldPen = dc.GetPen()\r\n dc.SetPen(self._dottedPen)\r\n dc.DrawLine(3, origY, 3, oldY)\r\n dc.SetPen(oldPen)\r\n \r\n return y\r\n \r\n item.SetX(x+self._spacing)\r\n item.SetY(y)\r\n\r\n h = self.GetLineHeight(item)\r\n y_top = y\r\n y_mid = y_top + (h>>1)\r\n y += h\r\n\r\n exposed_x = dc.LogicalToDeviceX(0)\r\n exposed_y = dc.LogicalToDeviceY(y_top)\r\n\r\n if self.IsExposed(exposed_x, exposed_y, 10000, h): # 10000 = very much\r\n if wx.Platform == \"__WXMAC__\":\r\n # don't draw rect outline if we already have the\r\n # background colour under Mac\r\n pen = ((item.IsSelected() and self._hasFocus) and [self._borderPen] or [wx.TRANSPARENT_PEN])[0]\r\n else:\r\n pen = self._borderPen\r\n\r\n if item.IsSelected():\r\n if (wx.Platform == \"__WXMAC__\" and self._hasFocus):\r\n colText = wx.SystemSettings_GetColour(wx.SYS_COLOUR_HIGHLIGHTTEXT)\r\n else:\r\n colText = wx.SystemSettings_GetColour(wx.SYS_COLOUR_HIGHLIGHTTEXT)\r\n else:\r\n attr = item.GetAttributes()\r\n if attr and attr.HasTextColour():\r\n colText = attr.GetTextColour()\r\n else:\r\n colText = self.GetForegroundColour()\r\n\r\n if self._vistaselection:\r\n colText = wx.BLACK\r\n \r\n # prepare to draw\r\n dc.SetTextForeground(colText)\r\n dc.SetPen(pen)\r\n oldpen = pen\r\n\r\n # draw\r\n self.PaintItem(item, dc, level, align)\r\n\r\n if self.HasAGWFlag(TR_ROW_LINES):\r\n \r\n # if the background colour is white, choose a\r\n # 
contrasting colour for the lines\r\n medium_grey = wx.Pen(wx.Colour(200, 200, 200))\r\n dc.SetPen(((self.GetBackgroundColour() == wx.WHITE) and [medium_grey] or [wx.WHITE_PEN])[0])\r\n dc.DrawLine(0, y_top, 10000, y_top)\r\n dc.DrawLine(0, y, 10000, y)\r\n \r\n # restore DC objects\r\n dc.SetBrush(wx.WHITE_BRUSH)\r\n dc.SetTextForeground(wx.BLACK)\r\n\r\n if not self.HasAGWFlag(TR_NO_LINES):\r\n \r\n # draw the horizontal line here\r\n dc.SetPen(self._dottedPen)\r\n x_start = x\r\n if x > self._indent+left_image_list:\r\n x_start -= self._indent\r\n elif self.HasAGWFlag(TR_LINES_AT_ROOT):\r\n x_start = 3\r\n dc.DrawLine(x_start, y_mid, x + self._spacing, y_mid)\r\n dc.SetPen(oldpen) \r\n\r\n # should the item show a button?\r\n if item.HasPlus() and self.HasButtons():\r\n \r\n if self._imageListButtons:\r\n \r\n # draw the image button here\r\n image_h = 0\r\n image_w = 0\r\n image = (item.IsExpanded() and [TreeItemIcon_Expanded] or [TreeItemIcon_Normal])[0]\r\n if item.IsSelected():\r\n image += TreeItemIcon_Selected - TreeItemIcon_Normal\r\n\r\n image_w, image_h = self._imageListButtons.GetSize(image)\r\n xx = x - image_w/2\r\n yy = y_mid - image_h/2\r\n\r\n dc.SetClippingRegion(xx, yy, image_w, image_h)\r\n self._imageListButtons.Draw(image, dc, xx, yy,\r\n wx.IMAGELIST_DRAW_TRANSPARENT)\r\n dc.DestroyClippingRegion()\r\n \r\n else: # no custom buttons\r\n\r\n if self.HasAGWFlag(TR_TWIST_BUTTONS):\r\n # We draw something like the Mac twist buttons\r\n \r\n dc.SetPen(wx.BLACK_PEN)\r\n dc.SetBrush(self._hilightBrush)\r\n button = [wx.Point(), wx.Point(), wx.Point()]\r\n \r\n if item.IsExpanded():\r\n button[0].x = x - 5\r\n button[0].y = y_mid - 3\r\n button[1].x = x + 5\r\n button[1].y = button[0].y\r\n button[2].x = x\r\n button[2].y = button[0].y + 6\r\n else:\r\n button[0].x = x - 3\r\n button[0].y = y_mid - 5\r\n button[1].x = button[0].x\r\n button[1].y = y_mid + 5\r\n button[2].x = button[0].x + 5\r\n button[2].y = y_mid\r\n \r\n dc.DrawPolygon(button)\r\n\r\n else:\r\n # These are the standard wx.TreeCtrl buttons as wx.RendererNative knows\r\n \r\n wImage = 9\r\n hImage = 9\r\n\r\n flag = 0\r\n\r\n if item.IsExpanded():\r\n flag |= _CONTROL_EXPANDED\r\n if item == self._underMouse:\r\n flag |= _CONTROL_CURRENT\r\n\r\n self._drawingfunction(self, dc, wx.Rect(x - wImage/2, y_mid - hImage/2,wImage, hImage), flag)\r\n \r\n if item.IsExpanded():\r\n \r\n children = item.GetChildren()\r\n count = len(children)\r\n \r\n if count > 0:\r\n \r\n n = 0\r\n level = level + 1\r\n\r\n while n < count:\r\n oldY = y\r\n y = self.PaintLevel(children[n], dc, level, y, align)\r\n n = n + 1\r\n \r\n if not self.HasAGWFlag(TR_NO_LINES) and count > 0:\r\n \r\n # draw line down to last child\r\n oldY += self.GetLineHeight(children[n-1])>>1\r\n if self.HasButtons():\r\n y_mid += 5\r\n\r\n # Only draw the portion of the line that is visible, in case it is huge\r\n xOrigin, yOrigin = dc.GetDeviceOrigin()\r\n yOrigin = abs(yOrigin)\r\n width, height = self.GetClientSize()\r\n\r\n # Move end points to the begining/end of the view?\r\n if y_mid < yOrigin:\r\n y_mid = yOrigin\r\n if oldY > yOrigin + height:\r\n oldY = yOrigin + height\r\n\r\n # after the adjustments if y_mid is larger than oldY then the line\r\n # isn't visible at all so don't draw anything\r\n if y_mid < oldY:\r\n dc.SetPen(self._dottedPen)\r\n dc.DrawLine(x, y_mid, x, oldY)\r\n \r\n return y", "def __init__(self, mainframe, path):\n\t\tttk.Frame.__init__(self, master=mainframe)\n\t\tself.master.title('Advanced Zoom 
v3.0')\n\t\tself.master.geometry('800x600') # size of the main window\n\t\t# self.master.rowconfigure(0, weight=1) # make the CanvasImage widget expandable\n\t\t# self.master.columnconfigure(0, weight=1)\n\n\n\t\tnavbar = Frame(self.master, width=100)\n\t\tnavbar.pack(anchor=W, fill=Y, expand=False, side=LEFT) # <----\n\n\n\n\t\tbutton = ttk.Button(navbar, text=\"object 1\", command=self.btn1)\n\t\tbutton.grid(column=1, row=1)\n\n\t\tbutton = ttk.Button(navbar, text=\"object 2\", command=self.btn2)\n\t\tbutton.grid(column=1, row=2)\n\n\n\n\t\tcontent_frame = Frame(self.master)\n\t\tcontent_frame.pack(anchor=N, fill=BOTH, expand=True, side=LEFT )\n\t\tcontent_frame.rowconfigure(0, weight=1) # make the CanvasImage widget expandable\n\t\tcontent_frame.columnconfigure(0, weight=1)\n\n\t\t\n\t\tself.master_dict = {}\n\t\t\n\n\t\t# for F in (Menu1, Menu2, Menu3):\n\t\t# \tpage_name = F.__name__\n\n\n\t\t# obj11 = AFTA()\n\t\t# print(obj11.name)\n\t\t# obj22 = HKA()\n\t\t# print(obj22.name)\n\t\t# obj3 = MNSA()\n\t\t# print(obj3.name)\n\t\t# obj4 = ALDFA()\n\t\t# print(obj4.name)\n\t\t# obj5 = MLDFA()\n\t\t# print(obj5.name)\n\t\t# obj6 = MPTA()\n\t\t# print(obj6.name)\n\t\t# obj7 = VCA()\n\t\t# print(obj7.name)\n\n\n\n\t\tself.draw_tools = DrawTools(content_frame, path) # create widget\n\t\tself.draw_tools.grid(row=0, column=0) # show widget\n\n\t\tself.obj1 = MainAnatomy(self.draw_tools, self.master_dict)\n\t\tself.obj2 = Myobj2(self.draw_tools)", "def on_draw_overlay(self):", "def DrawGripper(self, dc, wnd, rect):\r\n \r\n i = 0\r\n while 1:\r\n \r\n if self._agwFlags & AUI_TB_VERTICAL:\r\n \r\n x = rect.x + (i*4) + 4\r\n y = rect.y + 3\r\n if x > rect.GetWidth() - 4:\r\n break\r\n \r\n else:\r\n \r\n x = rect.x + 3\r\n y = rect.y + (i*4) + 4\r\n if y > rect.GetHeight() - 4:\r\n break\r\n \r\n dc.SetPen(self._gripper_pen1)\r\n dc.DrawPoint(x, y)\r\n dc.SetPen(self._gripper_pen2)\r\n dc.DrawPoint(x, y+1)\r\n dc.DrawPoint(x+1, y)\r\n dc.SetPen(self._gripper_pen3)\r\n dc.DrawPoint(x+2, y+1)\r\n dc.DrawPoint(x+2, y+2)\r\n dc.DrawPoint(x+1, y+2)\r\n\r\n i += 1" ]
[ "0.5789169", "0.57169425", "0.5665617", "0.5664664", "0.56565046", "0.5645116", "0.5549842", "0.5513326", "0.54959905", "0.54648364", "0.5380513", "0.5378376", "0.53420097", "0.52914715", "0.52714217", "0.5257676", "0.5251487", "0.52281845", "0.52245355", "0.5197554", "0.5197302", "0.51917607", "0.51894385", "0.5184408", "0.5182002", "0.5180582", "0.5175628", "0.51724416", "0.51583385", "0.5156425", "0.5155172", "0.5143069", "0.51178586", "0.5105593", "0.5104198", "0.50963765", "0.5091602", "0.5091602", "0.5090349", "0.50835747", "0.5078259", "0.50780416", "0.50768673", "0.5069105", "0.5066567", "0.50656", "0.50656", "0.50656", "0.50656", "0.5063578", "0.5062713", "0.50569755", "0.50530154", "0.5051029", "0.50480705", "0.50369215", "0.5018244", "0.50173146", "0.50141895", "0.5012623", "0.50115967", "0.5009691", "0.50072676", "0.5006458", "0.500058", "0.5000571", "0.49977112", "0.49759206", "0.4970158", "0.49639735", "0.49568862", "0.49553117", "0.49551532", "0.49467906", "0.49467126", "0.49437186", "0.49427003", "0.49404085", "0.49395853", "0.4932897", "0.4932883", "0.49294773", "0.49265444", "0.49257442", "0.49231777", "0.49225378", "0.49215484", "0.49161386", "0.49156222", "0.49123466", "0.49122268", "0.49061313", "0.49015242", "0.49005225", "0.48936868", "0.4887923", "0.48876607", "0.488164", "0.48802584", "0.48727453" ]
0.68525183
0
make the theme drawer
def makeThemeDrawer(self,node):
    themeDrawer = self.makeDrawer(node)
    themeDrawer.getRoot().setTexture(self.image)
    return themeDrawer
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def main(themes):\n # Get toggled mode based on current system mode.\n toggled_mode = get_toggled_mode(get_current_mode())\n print('\\nSetting themes...')\n\n for theme in themes:\n # Set toggled mode.\n theme.mode = toggled_mode\n theme.toggle_callback(theme)\n if IS_WINDOWS:\n print(f'Setting system theme to: {toggled_mode.name}')\n toggle_mode(toggled_mode)\n print()", "def __init__(self,_dir,_theme='skyblue'):\n self.dir=_dir\n self.theme = _theme", "def create_menu():", "def draw(self, context):\n layout = self.layout\n\n pie = layout.menu_pie()\n pie.operator(\"object.view_menu\", text=\"Node Editor\", icon='NODETREE').vp = \"NODE_EDITOR\"\n pie.operator(\"object.view_menu\", text=\"UV Image Editor\", icon='IMAGE_COL').vp = \"IMAGE_EDITOR\"\n pie.operator(\"object.view_menu\", text=\"Video Sequece Editor\", icon='SEQUENCE').vp = \"SEQUENCE_EDITOR\"\n pie.operator(\"object.view_menu\", text=\"Movie Clip Editor\", icon='CLIP').vp = \"CLIP_EDITOR\"", "def create_menus( self ):", "def makeDrawer(self,node):\n drawer = MeshDrawer2D()\n drawer.setBudget(3000)\n\n drawerNode = drawer.getRoot()\n drawerNode.reparentTo(node)\n drawerNode.setDepthWrite(False)\n drawerNode.setTransparency(True)\n drawerNode.setTwoSided(True)\n drawerNode.setBin(\"fixed\",0)\n drawerNode.setLightOff(True)\n drawerNode.node().setBounds(OmniBoundingVolume())\n drawerNode.node().setFinal(True) \n \n # debug wire frame\n #cc = drawerNode.copyTo(node)\n #cc.setRenderModeWireframe()\n\n return drawer", "def main_menu_toolbar():\n\n pass", "def theme_picker_open(self):\r\n if not self.md_theme_picker:\r\n self.md_theme_picker = MDThemePicker()\r\n self.md_theme_picker.open()", "def set_device_theme(dname, theme_type, number=0):\n\n # log in theme app like i theme\n activity_name = theme_config.getValue(dname,'set_theme_pkg')\n #DEVICE = device.Device(dname)\n #DEVICE.app_operation(action='LAUNCH', pkg=activity_name)\n DEVICE = adbtools.AdbTools(dname)\n #DEVICE.start_application(activity_name)\n find_text = [u'忽略本次']\n try:\n threads = []\n install_app = threading.Thread(target=DEVICE.start_application(), args=(activity_name,))\n proc_process = threading.Thread(target=myuiautomator.do_popup_windows, args=(5, find_text, dname))\n threads.append(proc_process)\n threads.append(install_app)\n for t in threads:\n t.setDaemon(True)\n t.start()\n sleep(2)\n t.join()\n except Exception, ex:\n print ex\n sleep(5)\n if number == 0:\n if theme_type.upper() == 'VLIFE':\n vlife_theme_path = theme_config.getValue(dname, 'vlife_theme_path').split('|')\n elif theme_type.upper() == 'SYSTEM':\n vlife_theme_path = theme_config.getValue(dname, 'system_theme_path').split('|')\n else:\n vlife_theme_path = theme_config.getValue(dname, 'third_party_theme_path').split('|')\n else:\n tag = 'vlife_theme_path_' + str(number)\n vlife_theme_path = theme_config.getValue(dname, tag).split('|')\n\n width, height = DEVICE.get_screen_normal_size()\n\n try:\n\n for text in vlife_theme_path:\n # try to swipe screen multiple times\n if text.startswith('NAME'):\n search_text = text.split('_')[1]\n for i in range(5):\n result = click_text(dname, search_text)\n if result:\n break\n else:\n # swipe screen\n cmd = 'input swipe {0} {1} {2} {3} 200'.format(int(width)/2, int(height)/2, int(width)/2, int(height)/2-300)\n DEVICE.shell(cmd)\n sleep(1)\n else:\n click_text(dname,text)\n\n # for i in range(3):\n # x = 0\n # y = 0\n # element = myuiautomator.Element(dname)\n # event = myuiautomator.Event(dname)\n # if text.find(':') == -1:\n # value = unicode(text)\n 
# # because there is not 'click' action on text, so have to click next to element\n # else:\n # value = unicode(text.split(':')[0])\n # x = text.split(':')[1]\n # y = text.split(':')[2]\n # ele = element.findElementByName(value)\n # if ele is not None:\n # event.touch(ele[0]-int(x), ele[1]-int(y))\n # sleep(2)\n # break\n # else:\n # # swipe screen\n # cmd = 'input swipe {0} {1} {2} {3} 200'.format(int(width)/2, int(height)/2, int(width)/2, int(height)/2-300)\n # DEVICE.shell(cmd)\n # sleep(1)\n\n except Exception,ex:\n print ex\n # return to HOME\n for i in range(3):\n DEVICE.send_keyevent(4)", "def draw_main_menu():\n draw_cover()\n draw_menu_buttons()\n draw_border()", "def dark_theme(self):\n if self.actionDark_Theme.isChecked():\n QApplication.setStyle(QStyleFactory.create(\"Fusion\"))\n palette = QPalette()\n palette.setColor(QPalette.Window, QColor(53, 53, 53))\n palette.setColor(QPalette.WindowText, Qt.white)\n palette.setColor(QPalette.Base, QColor(15, 15, 15))\n palette.setColor(QPalette.AlternateBase, QColor(53, 53, 53))\n palette.setColor(QPalette.ToolTipBase, Qt.white)\n palette.setColor(QPalette.ToolTipText, Qt.white)\n palette.setColor(QPalette.Text, Qt.white)\n palette.setColor(QPalette.Button, QColor(53, 53, 53))\n palette.setColor(QPalette.ButtonText, Qt.white)\n palette.setColor(QPalette.BrightText, Qt.red)\n palette.setColor(QPalette.Highlight, QColor(0, 24, 193).lighter())\n palette.setColor(QPalette.HighlightedText, Qt.black)\n palette.setColor(QPalette.Disabled, QPalette.Text, Qt.darkGray)\n palette.setColor(\n QPalette.Disabled, QPalette.ButtonText, Qt.darkGray)\n app.setPalette(palette)\n return\n\n app.setPalette(self.defaultPalette)", "def draw_menu(self, context):\n if context.engine == 'RPR':\n layout = self.layout\n layout.popover('RPR_VIEW3D_PT_panel')", "def init_ui(self):\n self.parent.title(\"Roku Player Controller\")\n self.style.theme_use(\"default\")", "def _apply_base_theme(self, app):\n\n app.setStyle(\"Fusion\")\n\n with open(self._STYLESHEET) as stylesheet:\n app.setStyleSheet(stylesheet.read())", "def apply_theme(self, ax):\n pass", "def open(self):\n self._data['open_drawer'] = True", "def updateTheme(self):\n self.myUpdate(stateDict=None)", "def apply_style(self, app):\n\n darkPalette = QPalette()\n\n # base\n darkPalette.setColor(QPalette.WindowText, QColor(180, 180, 180))\n darkPalette.setColor(QPalette.Button, QColor(53, 53, 53))\n darkPalette.setColor(QPalette.Light, QColor(180, 180, 180))\n darkPalette.setColor(QPalette.Midlight, QColor(90, 90, 90))\n darkPalette.setColor(QPalette.Dark, QColor(35, 35, 35))\n darkPalette.setColor(QPalette.Text, QColor(180, 180, 180))\n darkPalette.setColor(QPalette.BrightText, QColor(180, 180, 180))\n darkPalette.setColor(QPalette.ButtonText, QColor(180, 180, 180))\n darkPalette.setColor(QPalette.Base, QColor(42, 42, 42))\n darkPalette.setColor(QPalette.Window, QColor(53, 53, 53))\n darkPalette.setColor(QPalette.Shadow, QColor(20, 20, 20))\n darkPalette.setColor(QPalette.Highlight, QColor(42, 130, 218))\n darkPalette.setColor(QPalette.HighlightedText, QColor(180, 180, 180))\n darkPalette.setColor(QPalette.Link, QColor(56, 252, 196))\n darkPalette.setColor(QPalette.AlternateBase, QColor(66, 66, 66))\n darkPalette.setColor(QPalette.ToolTipBase, QColor(53, 53, 53))\n darkPalette.setColor(QPalette.ToolTipText, QColor(180, 180, 180))\n\n # disabled\n darkPalette.setColor(\n QPalette.Disabled, QPalette.WindowText, QColor(127, 127, 127)\n )\n darkPalette.setColor(\n QPalette.Disabled, QPalette.Text, QColor(127, 127, 
127)\n )\n darkPalette.setColor(\n QPalette.Disabled, QPalette.ButtonText, QColor(127, 127, 127)\n )\n darkPalette.setColor(\n QPalette.Disabled, QPalette.Highlight, QColor(80, 80, 80)\n )\n darkPalette.setColor(\n QPalette.Disabled, QPalette.HighlightedText, QColor(127, 127, 127)\n )\n\n app.setPalette(darkPalette)\n self._apply_base_theme(app)\n\n IconSet.current.set_color(QColor(180, 180, 180))", "def make_top_menus(self):\n menubar = tk.Menu(self)\n\n # create a pulldown menu for languages, and add it to the menu bar\n language_menu = tk.Menu(menubar, tearoff=0)\n language_menu.add_command(label=self.translate(\"English\"), command=lambda: self.replace_language('english'))\n language_menu.add_command(label=self.translate(\"Spanish\"), command=lambda: self.replace_language('spanish'))\n language_menu.add_command(label=self.translate(\"Portuguese\"), command=lambda: self.replace_language('portuguese'))\n menubar.add_cascade(label=self.translate(\"Languages\"), menu=language_menu)\n \n # create a pulldown menu for switching context areas, and add it to the menu bar\n context_menu = tk.Menu(menubar, tearoff=0)\n context_menu.add_command(label=self.translate(\"Chile\"), command=lambda: self.switch_context('Chile'))\n context_menu.add_command(label=self.translate(\"Indonesia\"), command=lambda: self.switch_context('Indonesia'))\n context_menu.add_command(label=self.translate(\"Luanda\"), command=lambda: self.switch_context('Luanda'))\n context_menu.add_command(label=self.translate(\"Querétaro\"), command=lambda: self.switch_context('Querétaro'))\n context_menu.add_command(label=self.translate(\"Rio de Janeiro\"), command=lambda: self.switch_context('Rio de Janeiro'))\n context_menu.add_command(label=self.translate(\"Santiago\"), command=lambda: self.switch_context('Santiago'))\n menubar.add_cascade(label=self.translate(\"Locations\"), menu=context_menu)\n \n # create a pulldown menu for arrangment, and add it to the menu bar\n language_menu = tk.Menu(menubar, tearoff=0)\n language_menu.add_command(label=self.translate(\"Graphs-Graphs\"), command=lambda: self.switch_arrangment(['Graph', 'Graph']))\n language_menu.add_command(label=self.translate(\"Graphs-Map\"), command=lambda: self.switch_arrangment(['Graph', 'Map']))\n language_menu.add_command(label=self.translate(\"Map-Graphs\"), command=lambda: self.switch_arrangment(['Map', 'Graph']))\n language_menu.add_command(label=self.translate(\"Map-Map\"), command=lambda: self.switch_arrangment(['Map', 'Map']))\n menubar.add_cascade(label=self.translate(\"Arrange\"), menu=language_menu)\n \n # create an exit command that closes the UI\n menubar.add_command(label=self.translate(\"Exit\"), command=self.destroy)\n \n # display the menu\n menubar.config(font=self.small_font)\n self.config(menu=menubar)\n \n return menubar", "def mainmenu_background():\n surface.fill((40, 0, 40))", "def new_theme(ctx, **defaults):\n from .quickstart import theme_quickstart\n\n project = ctx.get_project(silent=True)\n theme_quickstart(defaults, project=project)", "def apply_style(self, app):\n\n lightPalette = QPalette()\n\n # base\n lightPalette.setColor(QPalette.WindowText, QColor(0, 0, 0))\n lightPalette.setColor(QPalette.Button, QColor(240, 240, 240))\n lightPalette.setColor(QPalette.Light, QColor(180, 180, 180))\n lightPalette.setColor(QPalette.Midlight, QColor(200, 200, 200))\n lightPalette.setColor(QPalette.Dark, QColor(225, 225, 225))\n lightPalette.setColor(QPalette.Text, QColor(0, 0, 0))\n lightPalette.setColor(QPalette.BrightText, QColor(0, 0, 0))\n 
lightPalette.setColor(QPalette.ButtonText, QColor(0, 0, 0))\n lightPalette.setColor(QPalette.Base, QColor(237, 237, 237))\n lightPalette.setColor(QPalette.Window, QColor(240, 240, 240))\n lightPalette.setColor(QPalette.Shadow, QColor(20, 20, 20))\n lightPalette.setColor(QPalette.Highlight, QColor(76, 163, 224))\n lightPalette.setColor(QPalette.HighlightedText, QColor(0, 0, 0))\n lightPalette.setColor(QPalette.Link, QColor(0, 162, 232))\n lightPalette.setColor(QPalette.AlternateBase, QColor(225, 225, 225))\n lightPalette.setColor(QPalette.ToolTipBase, QColor(240, 240, 240))\n lightPalette.setColor(QPalette.ToolTipText, QColor(0, 0, 0))\n\n # disabled\n lightPalette.setColor(\n QPalette.Disabled, QPalette.WindowText, QColor(115, 115, 115)\n )\n lightPalette.setColor(\n QPalette.Disabled, QPalette.Text, QColor(115, 115, 115)\n )\n lightPalette.setColor(\n QPalette.Disabled, QPalette.ButtonText, QColor(115, 115, 115)\n )\n lightPalette.setColor(\n QPalette.Disabled, QPalette.Highlight, QColor(190, 190, 190)\n )\n lightPalette.setColor(\n QPalette.Disabled, QPalette.HighlightedText, QColor(115, 115, 115)\n )\n\n app.setPalette(lightPalette)\n\n self._apply_base_theme(app)\n IconSet.current.set_color(QColor(0, 0, 0))", "def draw(self, context):\n layout = self.layout\n pie = layout.menu_pie()\n\n pie.operator(\"wm.call_menu_pie\", text=\"Images\", icon='IMAGE_COL').name = \"PieAreaViewsImage\"\n pie.operator(\"wm.call_menu_pie\", text=\"Anim\", icon='IPO').name = \"PieAreaViewsAnim\"\n pie.operator(\"screen.screen_full_area\", text=\"Full Screen\", icon='FULLSCREEN_ENTER')\n pie.operator(\"object.view_menu\", text=\"3D View\", icon='VIEW3D').vp = \"VIEW_3D\"\n\n # DIAGONALS\n pie.operator(\"wm.call_menu_pie\", text=\"Utils\", icon='BUTS').name = \"PieAreaViewsUtils\"\n pie.separator()\n pie.operator(\"area.joinarea\", text=\"Join\", icon='X')\n pie.operator(\"wm.call_menu_pie\", text=\"Split\", icon='SPLITSCREEN').name = \"pie.split_viewport\"", "def set_style(self):", "def on_show_view(self):\n self.window.background_color = arcade.color.WHITE", "def mainmenu_background():\n gameDisplay.fill((40, 0, 40))", "def mainmenu_background():\n gameDisplay.fill((40, 0, 40))", "def setWidget(self, widget: QtWidgets.QWidget):\n super().setWidget(widget)\n if globalstuff.theme == 'dark':\n w = self.widget()\n w.setPalette(globalstuff.textpal)\n if hasattr(w, 'TreeWidget'):\n w.TreeWidget.setStyleSheet(globalstuff.treeqss)", "def create_layout() -> None:\n\n st.sidebar.title(\"Menu\")\n app_mode = st.sidebar.selectbox(\"Please select a page\", [' I. Homepage',\n \"II. Download data\" ,\n \"III. Statistic Data\",\n ' IV. AGF Indices',\n ' V. Notes',\n \" VI. Rank of patient\" ])\n \n if app_mode == ' I. Homepage':\n load_homepage() \n elif app_mode == \"III. Statistic Data\":\n leyer.leyer() \n elif app_mode == ' IV. AGF Indices':\n single.AGF_indices() \n elif app_mode == \"II. Download data\":\n download_data.download_data() \n elif app_mode == ' V. Notes':\n text_input.text_input()\n elif app_mode == \" VI. 
Rank of patient\":\n rank_of_patient.rank_of_patient()", "def root_wdgt(self):\n self.summarize()\n modes = ['Global', 'Single-Image']\n\n def logic(mode):\n # cache the widget later\n if mode == modes[0]:\n if self.global_walk is None:\n self.global_walk = self.global_walk_specifier()\n ipy_display(self.global_walk)\n elif mode == modes[1]:\n self.image_view = self.single_image_selector()\n # if self.image_view is None:\n # self.image_view = self.single_image_selector()\n # ipy_display(self.image_view)\n\n UI = interactive(\n logic, mode=widgets.ToggleButtons(options=modes, value=modes[0])\n )\n UI.children[-1].layout.height = '1000px'\n ipy_display(UI)", "def setup_theme():\n os.system('sudo apt install arc-theme')\n\n output = \"{padding}{mark} Installing theme...\"\n print(output.format(padding=LEFT_PADDING, mark=BALLOT_MARK))", "def paintCustomizeZone(self, idTag = None):\n if self.fileDialogShow:\n return\n if idTag == self.activeOption:\n return\n if idTag == \"skirt\" and self.avatarConfiguration[\"gender\"] == \"boy\":\n return \n if self.avatarConfiguration[\"gender\"] == \"girl\" and idTag in [\"shirt\", \"trousers\"]:\n return\n if not idTag:\n idTag = \"gender\"\n else:\n self.changeImageTab(idTag)\n self.removeWidgets()\n\n if idTag == \"gender\":\n self.changeBackgroundLeft(\"background_left.png\")\n self.paintGenderFrame()\n elif idTag == \"skin\":\n self.changeBackgroundLeft(\"background_left_big_palette.png\")\n self.paintColorPalette(self.updateSkin, \"skin\", \"skin\")\n elif idTag == \"head\":\n self.changeBackgroundLeft(\"background_left.png\")\n self.paintSizePalette(self.updateSizeHead)\n elif idTag == \"body\":\n self.changeBackgroundLeft(\"background_left.png\")\n self.paintSizePalette(self.updateSizeBody)\n elif idTag == \"mask\": \n self.changeBackgroundLeft(\"background_left.png\")\n self.paintSelectionItem(\"mask\")\n self.paintMaskOptions()\n elif idTag == \"hair\":\n self.changeBackgroundLeft(\"background_left_small_palette.png\")\n self.paintColorPalette(self.updateHairColor, \"hair\", \"hair\")\n elif idTag == \"shirt\":\n self.changeBackgroundLeft(\"background_left_big_palette.png\")\n self.paintColorPalette(self.updateShirtColor, \"cloth\", \"shirt\")\n self.paintWinterSelection(\"typeShirt\")\n elif idTag == \"trousers\":\n self.changeBackgroundLeft(\"background_left_big_palette.png\")\n self.paintColorPalette(self.updateTrouserColor, \"cloth\", \"trousers\")\n self.paintWinterSelection(\"typeTrousers\")\n elif idTag == \"skirt\":\n self.changeBackgroundLeft(\"background_left_big_palette.png\")\n self.paintColorPalette(self.updateSkirtColor, \"cloth\", \"skirt\")\n self.paintWinterSelection(\"typeSkirt\")\n elif idTag == \"shoes\":\n self.changeBackgroundLeft(\"background_left_big_palette.png\")\n self.paintColorPalette(self.updateShoesColor, \"cloth\", \"shoes\")\n self.activeOption = idTag", "def init_menu(self):\r\n # generate password\r\n gen_pwd_action = QtWidgets.QAction('Generate Password', self) \r\n gen_pwd_action.triggered.connect(self.create_password)\r\n\r\n # generate key file\r\n gen_key_action = QtWidgets.QAction('Generate Key File', self) \r\n gen_key_action.triggered.connect(self.create_key)\r\n\r\n # exit action, closes the program\r\n exit_action = QtWidgets.QAction('Exit', self) \r\n exit_action.setShortcut('Ctrl+Q')\r\n exit_action.setStatusTip('Exit application')\r\n exit_action.triggered.connect(app.quit)\r\n\r\n # Theme menus\r\n light_theme_action = QtWidgets.QAction('Light theme', self) \r\n 
light_theme_action.triggered.connect(self.light_theme)\r\n dark_theme_action = QtWidgets.QAction('Dark theme', self) \r\n dark_theme_action.triggered.connect(self.dark_theme)\r\n ubuntu_theme_action = QtWidgets.QAction('Ubuntu theme', self) \r\n ubuntu_theme_action.triggered.connect(self.ubuntu_theme)\r\n solaris_theme_action = QtWidgets.QAction('Solaris theme', self) \r\n solaris_theme_action.triggered.connect(self.solaris_theme)\r\n\r\n # Create menu bar and add action\r\n menuBar = self.menuBar()\r\n fileMenu = menuBar.addMenu('File')\r\n fileMenu.addAction(gen_pwd_action)\r\n fileMenu.addAction(gen_key_action)\r\n fileMenu.addSeparator()\r\n fileMenu.addAction(exit_action)\r\n themeMenu = menuBar.addMenu('Theme')\r\n themeMenu.addAction(light_theme_action)\r\n themeMenu.addAction(dark_theme_action)\r\n themeMenu.addAction(ubuntu_theme_action)\r\n themeMenu.addAction(solaris_theme_action)", "def theme(self, theme):\n\n self._theme = theme", "def _setupUi(self):\n self.setupUi(self)\n self.twTree.setStyleSheet(\"background-color: rgb(200, 200, 200)\")", "def dark_mode(app):\n palette = QPalette()\n palette.setColor(QPalette.Window, QColor(30, 30, 30))\n palette.setColor(QPalette.WindowText, QColor(225, 225, 225))\n palette.setColor(QPalette.Light, Qt.white)\n palette.setColor(QPalette.Midlight, QColor(225, 225, 225))\n palette.setColor(QPalette.Dark, QColor(65, 65, 65))\n palette.setColor(QPalette.Mid, QColor(160, 160, 160))\n palette.setColor(QPalette.BrightText, QColor(255, 51, 51))\n palette.setColor(QPalette.Button, QColor(40, 40, 40))\n palette.setColor(QPalette.Base, QColor(65, 65, 65))\n palette.setColor(QPalette.AlternateBase, QColor(50, 50, 50))\n palette.setColor(QPalette.ToolTipBase, Qt.white)\n palette.setColor(QPalette.ToolTipText, Qt.white)\n palette.setColor(QPalette.Text, QColor(225, 225, 225))\n palette.setColor(QPalette.ButtonText, QColor(225, 225, 225))\n palette.setColor(QPalette.Link, QColor(42, 130, 218))\n palette.setColor(QPalette.Highlight, QColor(42, 130, 218))\n palette.setColor(QPalette.HighlightedText, Qt.black)\n app.setPalette(palette)\n return app", "def on_show_view(self):\n self.window.background_color = arcade.color.BLACK", "def build(theme: str) -> sg.Window:\n\n # yapf: disable\n sg.theme(theme)\n des=['Top 10 de palabras que se encuentran primero de todas las partidas','Porcentaje de partidas por estado (terminada, cancelada,abandonadas)','Porcentaje de partidas finalizadas según género',\n 'Cantidad de partidas que se juegan para cada día de la semana','Promedio de tiempo de partidas finalizadas por nivel.','Porcentaje de palabras encontradas en las partidas timeout.'\n ]\n tab_layout=[[[sg.Text(des[x],font=(f\"{WINDOW_FONT}\", WINDOW_FONT_SIZE))],[sg.Canvas(key=f\"-CANVAS{x}-\")]] for x in range(len(des))]\n\n layout = [[sg.Text(f\"Estadisticas\",font=(WINDOW_TITLE_FONT, WINDOW_FONT_SIZE * 2))],\n [sg.TabGroup([[sg.Tab(f'Gráfico {l+1}',tab_layout[l],element_justification='center') for l in range(len(des))]])],\n [sg.Button(\"Menu\",key=\"-BACK BUTTON-\")]\n ]\n # yapf: enable\n stat_window = sg.Window(\"Stats\",layout,finalize=True,element_justification='center',margins=(10, 10),size=(900, 700))\n info = pd.read_csv(os.path.join(os.getcwd(), GAME_INFO_PATH),encoding='utf-8')\n draw_figure(stat_window['-CANVAS0-'].TKCanvas, top_10_palabras(info))\n stat_window.refresh() #Esta linea permite que se muestre más rápido el primer gráfico, dando tiempo a que se creen los demás\n draw_figure(stat_window['-CANVAS1-'].TKCanvas, partidas_por_estado(info))\n 
draw_figure(stat_window['-CANVAS2-'].TKCanvas, partidas_por_genero(info))\n draw_figure(stat_window['-CANVAS3-'].TKCanvas, partidas_por_dia(info))\n draw_figure(stat_window['-CANVAS4-'].TKCanvas,promedio_tiempo_por_nivel(info))\n draw_figure(stat_window['-CANVAS5-'].TKCanvas,cant_encontradas_en_timeout(info))\n\n return stat_window", "def change_theme(self):\n # get the QApplication instance, or crash if not set\n app = QtWidgets.QApplication.instance()\n if app is None:\n raise RuntimeError(\"No Qt Application found.\")\n\n if self.darkCheckBox.isChecked():\n app.setStyleSheet(qdarkstyle.load_stylesheet_pyqt5())\n else:\n app.setStyleSheet(\"\")", "def on_load_theme (self):\n\n\t\tif self.has_started:\n\t\t\tself.init_buffers()\n\t\t\tself.redraw_background()\n\t\t\tself.redraw_foreground()", "def change_theme(self):\n # get the QApplication instance, or crash if not set\n app = QApplication.instance()\n if app is None:\n raise RuntimeError(\"No Qt Application found.\")\n\n if self.darkCheckBox.isChecked():\n app.setStyleSheet(qdarkstyle.load_stylesheet_pyqt5())\n else:\n app.setStyleSheet(\"\")", "def decorate(self,widget,level): \n\n w = widget\n if level == False: return\n \n if type(w.style.background) != int:\n w.background = Background(w,self) \n \n if level == 'app': return\n \n for k,v in list(w.style.__dict__.items()):\n if k in ('border','margin','padding'):\n for kk in ('top','bottom','left','right'):\n setattr(w.style,'%s_%s'%(k,kk),v)\n\n w.paint = self.paint(w,w.paint)\n w.event = self.event(w,w.event)\n w.update = self.update(w,w.update)\n w.resize = self.resize(w,w.resize)\n w.open = self.open(w,w.open)", "def create_menu(self):\n about = gtk.ImageMenuItem(gtk.STOCK_ABOUT)\n about.connect_object('activate', self.about, 'about')\n about.show()\n\n# prefs = gtk.ImageMenuItem(gtk.STOCK_PREFERENCES)\n# prefs.connect_object('activate', self.prefs, 'prefs')\n# prefs.show()\n\n quit = gtk.ImageMenuItem(gtk.STOCK_QUIT)\n quit.connect_object('activate', self.exit, 'quit')\n quit.show()\n\n menu = gtk.Menu()\n menu.append(about)\n# menu.append(prefs)\n menu.append(quit)\n return menu", "def create_layout( self ):", "def generateScheme(self, apply=True):\n BASE_COLOR = self.baseColor\n HIGHLIGHT_COLOR = self.highlightColor\n BRIGHTNESS_SPREAD = self.spread\n \n if self.__lightness(BASE_COLOR) > 0.5:\n SPREAD = 100/BRIGHTNESS_SPREAD\n else:\n SPREAD = 100*BRIGHTNESS_SPREAD\n \n if self.__lightness(HIGHLIGHT_COLOR)>0.6:\n HIGHLIGHTEDTEXT_COLOR= BASE_COLOR.darker(SPREAD*2)\n else:\n HIGHLIGHTEDTEXT_COLOR= BASE_COLOR.lighter(SPREAD*2)\n \n self.palette.setBrush(QtGui.QPalette.Window, QtGui.QBrush(BASE_COLOR))\n self.palette.setBrush(QtGui.QPalette.WindowText, QtGui.QBrush(BASE_COLOR.lighter(SPREAD)))\n self.palette.setBrush(QtGui.QPalette.Foreground, QtGui.QBrush(BASE_COLOR.lighter(SPREAD)))\n self.palette.setBrush(QtGui.QPalette.Base, QtGui.QBrush(BASE_COLOR))\n self.palette.setBrush(QtGui.QPalette.AlternateBase, QtGui.QBrush(BASE_COLOR.darker(SPREAD)))\n self.palette.setBrush(QtGui.QPalette.ToolTipBase, QtGui.QBrush(BASE_COLOR))\n self.palette.setBrush(QtGui.QPalette.ToolTipText, QtGui.QBrush(BASE_COLOR.lighter(SPREAD)))\n self.palette.setBrush(QtGui.QPalette.Text, QtGui.QBrush(BASE_COLOR.lighter(SPREAD*1.2)))\n self.palette.setBrush(QtGui.QPalette.Button, QtGui.QBrush(BASE_COLOR.lighter(SPREAD/3)))\n self.palette.setBrush(QtGui.QPalette.ButtonText, QtGui.QBrush(BASE_COLOR.lighter(SPREAD)))\n self.palette.setBrush(QtGui.QPalette.BrightText, QtGui.QBrush(QtGui.QColor(240, 240, 240)))\n 
\n self.palette.setBrush(QtGui.QPalette.Light, QtGui.QBrush(BASE_COLOR.lighter(SPREAD)))\n self.palette.setBrush(QtGui.QPalette.Midlight, QtGui.QBrush(BASE_COLOR.lighter(SPREAD/2)))\n self.palette.setBrush(QtGui.QPalette.Dark, QtGui.QBrush(BASE_COLOR.darker(SPREAD)))\n self.palette.setBrush(QtGui.QPalette.Mid, QtGui.QBrush(BASE_COLOR)) \n self.palette.setBrush(QtGui.QPalette.Shadow, QtGui.QBrush(BASE_COLOR.darker(SPREAD))) \n \n self.palette.setBrush(QtGui.QPalette.Highlight, QtGui.QBrush(HIGHLIGHT_COLOR))\n self.palette.setBrush(QtGui.QPalette.HighlightedText, QtGui.QBrush(HIGHLIGHTEDTEXT_COLOR))\n if apply:\n QtGui.QApplication.setPalette(self.palette)", "def on_show_view(self):\n self.setup()\n arcade.set_background_color(arcade.color.BLACK)", "def menu_screen(win):\n\tpass", "def CreateShapesWithStyle(self):\r\n\r\n useAero = (GetManager(self.GetParent()).GetAGWFlags() & AUI_MGR_AERO_DOCKING_GUIDES) != 0\r\n useWhidbey = (GetManager(self.GetParent()).GetAGWFlags() & AUI_MGR_WHIDBEY_DOCKING_GUIDES) != 0\r\n\r\n self._useAero = 0\r\n if useAero:\r\n self._useAero = 1\r\n elif useWhidbey:\r\n self._useAero = 2\r\n \r\n if useAero:\r\n sizeX, sizeY = aeroguideSizeX, aeroguideSizeY\r\n elif useWhidbey:\r\n sizeX, sizeY = whidbeySizeX, whidbeySizeY \r\n else:\r\n sizeX, sizeY = guideSizeX, guideSizeY\r\n\r\n rectLeft = wx.Rect(0, sizeY, sizeY, sizeX)\r\n rectTop = wx.Rect(sizeY, 0, sizeX, sizeY)\r\n rectRight = wx.Rect(sizeY+sizeX, sizeY, sizeY, sizeX)\r\n rectBottom = wx.Rect(sizeY, sizeX + sizeY, sizeX, sizeY)\r\n rectCenter = wx.Rect(sizeY, sizeY, sizeX, sizeX)\r\n \r\n if not self._useAero:\r\n\r\n self.targetLeft = AuiDockingGuideWindow(self, rectLeft, wx.LEFT, True, useAero)\r\n self.targetTop = AuiDockingGuideWindow(self, rectTop, wx.TOP, True, useAero)\r\n self.targetRight = AuiDockingGuideWindow(self, rectRight, wx.RIGHT, True, useAero)\r\n self.targetBottom = AuiDockingGuideWindow(self, rectBottom, wx.BOTTOM, True, useAero)\r\n self.targetCenter = AuiDockingGuideWindow(self, rectCenter, wx.CENTER, True, useAero)\r\n\r\n \r\n # top-left diamond\r\n tld = [wx.Point(rectTop.x, rectTop.y+rectTop.height-8),\r\n wx.Point(rectLeft.x+rectLeft.width-8, rectLeft.y),\r\n rectTop.GetBottomLeft()]\r\n # bottom-left diamond\r\n bld = [wx.Point(rectLeft.x+rectLeft.width-8, rectLeft.y+rectLeft.height),\r\n wx.Point(rectBottom.x, rectBottom.y+8),\r\n rectBottom.GetTopLeft()]\r\n # top-right diamond\r\n trd = [wx.Point(rectTop.x+rectTop.width, rectTop.y+rectTop.height-8),\r\n wx.Point(rectRight.x+8, rectRight.y),\r\n rectRight.GetTopLeft()] \r\n # bottom-right diamond\r\n brd = [wx.Point(rectRight.x+8, rectRight.y+rectRight.height),\r\n wx.Point(rectBottom.x+rectBottom.width, rectBottom.y+8),\r\n rectBottom.GetTopRight()]\r\n\r\n self._triangles = [tld[0:2], bld[0:2],\r\n [wx.Point(rectTop.x+rectTop.width-1, rectTop.y+rectTop.height-8),\r\n wx.Point(rectRight.x+7, rectRight.y)],\r\n [wx.Point(rectRight.x+7, rectRight.y+rectRight.height),\r\n wx.Point(rectBottom.x+rectBottom.width-1, rectBottom.y+8)]]\r\n \r\n region = wx.Region()\r\n region.UnionRect(rectLeft)\r\n region.UnionRect(rectTop)\r\n region.UnionRect(rectRight)\r\n region.UnionRect(rectBottom)\r\n region.UnionRect(rectCenter)\r\n region.UnionRegion(wx.RegionFromPoints(tld))\r\n region.UnionRegion(wx.RegionFromPoints(bld))\r\n region.UnionRegion(wx.RegionFromPoints(trd))\r\n region.UnionRegion(wx.RegionFromPoints(brd))\r\n\r\n elif useAero:\r\n\r\n self._aeroBmp = aero_dock_pane.GetBitmap()\r\n region = 
wx.RegionFromBitmap(self._aeroBmp)\r\n\r\n self._allAeroBmps = [aero_dock_pane_left.GetBitmap(), aero_dock_pane_top.GetBitmap(),\r\n aero_dock_pane_right.GetBitmap(), aero_dock_pane_bottom.GetBitmap(),\r\n aero_dock_pane_center.GetBitmap(), aero_dock_pane.GetBitmap()]\r\n self._deniedBitmap = aero_denied.GetBitmap()\r\n self._aeroRects = [rectLeft, rectTop, rectRight, rectBottom, rectCenter]\r\n self._valid = True\r\n\r\n elif useWhidbey:\r\n\r\n self._aeroBmp = whidbey_dock_pane.GetBitmap()\r\n region = wx.RegionFromBitmap(self._aeroBmp)\r\n\r\n self._allAeroBmps = [whidbey_dock_pane_left.GetBitmap(), whidbey_dock_pane_top.GetBitmap(),\r\n whidbey_dock_pane_right.GetBitmap(), whidbey_dock_pane_bottom.GetBitmap(),\r\n whidbey_dock_pane_center.GetBitmap(), whidbey_dock_pane.GetBitmap()]\r\n self._deniedBitmap = whidbey_denied.GetBitmap()\r\n self._aeroRects = [rectLeft, rectTop, rectRight, rectBottom, rectCenter]\r\n self._valid = True\r\n \r\n \r\n self.region = region", "def initAppletDrawerUic(self):\n with Tracer(traceLogger):\n # Load the ui file (find it in our own directory)\n localDir = os.path.split(__file__)[0]+'/'\n # (We don't pass self here because we keep the drawer ui in a separate object.)\n self.drawer = uic.loadUi(localDir+\"/dataSelectionDrawer.ui\")\n\n # Set up our handlers\n self.drawer.addFileButton.clicked.connect(self.handleAddFileButtonClicked)\n self.drawer.addFileButton.setIcon( QIcon(ilastikIcons.AddSel) )\n\n self.drawer.addMassButton.clicked.connect(self.handleMassAddButtonClicked)\n self.drawer.addMassButton.setIcon( QIcon(ilastikIcons.AddSel) )\n\n self.drawer.addStackButton.clicked.connect(self.handleAddStackButtonClicked)\n self.drawer.addStackButton.setIcon( QIcon(ilastikIcons.AddSel) )\n\n self.drawer.addStackFilesButton.clicked.connect(self.handleAddStackFilesButtonClicked)\n self.drawer.addStackFilesButton.setIcon( QIcon(ilastikIcons.AddSel) )\n\n self.drawer.removeFileButton.setEnabled(False)\n self.drawer.removeFileButton.clicked.connect(self.handleRemoveButtonClicked)\n self.drawer.removeFileButton.setIcon( QIcon(ilastikIcons.RemSel) )", "def __init__(self,baseColor=QtGui.QColor(50,50,50), highlightColor=QtGui.QColor(\"yellow\"), spread=2.5):\n self.palette = QtGui.QPalette()\n self.baseColor = baseColor\n self.highlightColor = highlightColor\n self.spread = spread\n self.generateScheme()\n QtGui.QApplication.setStyle(\"Plastique\")", "def main():\n mainWin = gtk.Window()\n \n #initialize additional widgets\n mainLayout = gtk.Layout()\n artMenuItem = radialmenuitem.RadItem()\n #Connect particular events to particular widgets\n #Add above widgets to window\n mainWin.add(artMenuItem)\n\n mainWin.fullscreen()\n mainWin.set_decorated(0)\n mainWin.show_all()\n\n gtk.main() #main loop", "def switch_state():\n\tDmg.OpenWindow()", "def light_mode(grid: bool = False) -> sns.set_theme:\n if grid:\n return sns.set_theme(style=\"whitegrid\")\n sns.set_theme(style=\"white\")", "def on_show_view(self):\r\n self.setup()\r\n arcade.set_background_color(BACKGROUND_COLOR)", "def create_widgets(self):", "async def update_theme(q: Q):\n\n copy_expando(q.args, q.client)\n\n if q.client.theme_dark:\n logging.info('Updating theme to dark mode')\n\n q.client.path_architecture = q.app.paths_architecture['dark']\n\n q.page['meta'].theme = 'neon'\n q.page['header'].icon_color = 'black'\n else:\n logging.info('Updating theme to light mode')\n\n q.client.path_architecture = q.app.paths_architecture['light']\n\n q.page['meta'].theme = 'light'\n q.page['header'].icon_color = 
'#CDDD38'\n\n q.page['misc'].items[3].toggle.value = q.client.theme_dark\n\n if q.client['#'] == 'home':\n q.page['home'].items[2].text.content = f'''<center>\n <img src=\"{q.client.path_architecture}\" width=\"540px\"></center>'''\n elif q.client['#'] == 'resources':\n q.page['code_examples'] = cards.code_examples(\n code_function=q.client.code_function,\n theme_dark=q.client.theme_dark\n )\n\n await q.page.save()", "def dmenu_setup(args):\n scheme = namedtuple(\n 'dmenu',\n [\n 'target', # pass / apps/ notes / search\n 'prefix', # location prefix (base dir)\n 'suffix', # file extension to look for\n 'font', # dmenu font name and size\n 'nb','nf','sb','sf', # dmenu color:\n # n=normal / s=selected,\n # b=background, f=foreground\n ])\n\n dmenu = \"\"\n if args.passw:\n dmenu = scheme(\n target='pass',\n prefix = os.getenv\\\n ('PASSWORD_STORE_DIR',os.path.normpath\n (os.path.expanduser\\\n ('~/.password-store')\\\n )\\\n ),\n suffix=\".gpg\",\n font='Dejavu Sans Mono:medium:size=18',\n nb='#191919', nf='#ff0000', sb='#ff9318', sf='#191919',\n )\n if args.apps:\n dmenu = scheme(\n target='apps',\n prefix=\"/usr/share/applications\",\n suffix=\".desktop\",\n font='Dejavu Sans Mono:medium:size=18',\n nb='#191919', nf='#2e9ef4', sb='#2e9ef4', sf='#191919',\n )\n if args.notes:\n dmenu = scheme(\n target='notes',\n prefix=os.path.expanduser('~/git/notes'),\n suffix=\".md\",\n font='Dejavu Sans Mono:medium:size=18',\n nb='#191919', nf='#2aa198', sb='#2aa198', sf='#191919',\n )\n if args.search:\n dmenu = scheme(\n target='search',\n prefix=os.path.expanduser('~/work'),\n suffix=\"\",\n font='Dejavu Sans Mono:medium:size=18',\n nb='#191919', nf='#2aa198', sb='#11D91E', sf='#191919',\n )\n \n check_dir_exist(dmenu)\n return dmenu", "def set_group_theme(request):\n\treturn render(request,\"404.html\",{})", "def setupNewGame(self):\r\n self.level = 1\r\n self.num_cows = 2\r\n self.num_farmers = 1\r\n self.levelHeading = Text(self.gameDisplay, 120, 425, 175, self.light_orange, \"Farm 1\")\r\n self.shield_indicator.image = self.greenShield\r\n updatedHeading = self.levelHeading\r\n self.startUX[0] = updatedHeading", "def main():\r\n root = tk.Tk()\r\n app = Home(root)\r\n root.geometry(app.resize())\r\n root.configure(background = jt.color_background)\r\n root.mainloop()", "def menu():\n ferme_fenetre()\n Menu()", "def on_show_view(self):\n\n # Makes the background darker\n arcade.set_background_color([rgb - 50 for rgb in arcade.color.DARK_BLUE_GRAY])\n\n # Enable the UIManager when the view is showm.\n self.manager.enable()", "def create_widgets( self ):", "def load_theme_values(self): \n pass", "def refresh(self):\n self._themes = {}\n for theme in starchain(ldr(self.app) for ldr in self.loaders):\n if self.valid_app_id(theme.application):\n self.themes[theme.identifier] = theme\n self.register_theme_assets()", "def redraw_menu(self, event):\n self.appInit()\n self.redraw()", "def main_menu(self):\n\n # Set the window background\n self.palette = QPalette()\n self.pixmap = QPixmap('./pictures/menu_cat.png').scaled(860, 640)\n self.palette.setBrush(QPalette.Background, QBrush(self.pixmap))\n self.setPalette(self.palette)\n\n for item in self.mainmenu_items:\n item.show()\n for item in self.mapmenu_items:\n item.hide()", "def createMenubar(self):\r\n # Create menubar\r\n self.menubar = tk.Menu(tearoff=False)\r\n self.root.config(menu=self.menubar)\r\n filemenu = tk.Menu(self.menubar,tearoff=False)\r\n filemenu.add_command(label=\"Edit Video/fNIRS Sources\",command=self.launchImportWindow)\r\n 
filemenu.add_command(label=\"Synchronise Video/fNIRS\",command=self.launchSyncToolWindow)\r\n filemenu.add_command(label=\"Help\",command=self.launchHelpWindow)\r\n filemenu.add_command(label=\"Quit\",command=self.quit)\r\n self.menubar.add_cascade(label=\"Project\",menu=filemenu)", "def draw_window_pane():\n houseturtle.begin_fill()\n for y in range(4):\n houseturtle.pendown()\n houseturtle.forward(35)\n houseturtle.left(90)\n houseturtle.penup()\n houseturtle.end_fill()", "def activate(self, sidebarwin):\n\t\tself.sidebarWin = sidebarwin\n\t\tLogging.info(\"Disabling tasks in menu\", kw = \"visualizer\")\n\t\tself.menuManager.mainToolbar.EnableTool(MenuManager.ID_ADJUST, 0)\n\t\tself.menuManager.mainToolbar.EnableTool(MenuManager.ID_RESTORE, 0)\n\t\tself.menuManager.mainToolbar.EnableTool(MenuManager.ID_COLOCALIZATION, 0)\n\t\tself.menuManager.mainToolbar.EnableTool(MenuManager.ID_COLORMERGING, 0)\n\t\tself.visualizer.sliderPanel.Show(0)\n\t\tself.origSliderWinSize = self.visualizer.sliderWin.GetSize()\n\t\tself.visualizer.sliderWin.SetDefaultSize((-1, 64))\n\t\t\n\t\tif not self.urmaswin:\n\t\t\tself.urmaswin = GUI.Urmas.UrmasWindow.UrmasWindow(self.parent, \\\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.visualizer.menuManager, \\\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.visualizer.mainwin.taskWin, \\\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.visualizer)\n\t\t\t\n\t\telse:\n\t\t\tprint \"Restoring\",self.urmaswin\n\t\t\tself.urmaswin.Show(1)\n\t\t\tself.parent.Show(1)\n\t\t\tself.urmaswin.enableRendering(1)\n\t\t\tself.urmaswin.controlpanel.Show(1)\n\t\t\tself.urmaswin.createMenu(self.visualizer.menuManager)\n\t\t\twx.CallAfter(self.urmaswin.updateRenderWindow)\n\t\t\t\n\t\treturn self.urmaswin", "def create_menu():\n MenuData = [\n (\"&Draw Variables\",drawable.ask),\n (\"&Show Variables\",printall),\n (\"&Print Variables\",printval),\n (\"&Edit Variable\",edit),\n (\"&Rename Variable\",rename),\n (\"&Forget Variables\",forget),\n (\"---\",None),\n (\"&Create Plane\",\n [(\"Coordinates\", \n [(\"Point and normal\", createPlaneCoordsPointNormal),\n (\"Three points\", createPlaneCoords3Points),\n ]), \n (\"Visually\", \n [(\"Three points\", createPlaneVisual3Points),\n ]),\n ]),\n (\"&Select Plane\",planes.ask),\n (\"&Draw Selection\",planes.draw),\n (\"&Forget Selection\",planes.forget),\n (\"---\",None),\n (\"&Pick Actors\",pick_actors),\n (\"&Pick Elements\",pick_elements),\n (\"&Pick Points\",pick_points),\n (\"&Pick Edges\",pick_edges),\n (\"---\",None),\n ('&Selection',\n [('&Create Report',report_selection),\n ('&Set Property',setprop_selection),\n ('&Grow',grow_selection),\n ('&Partition',partition_selection),\n ('&Get Partition',get_partition),\n ('&Export',export_selection),\n ]),\n (\"---\",None),\n ('&Query',\n [('&Actors',query_actors),\n ('&Elements',query_elements),\n ('&Points',query_points),\n ('&Edges',query_edges),\n ('&Distances',query_distances),\n ]),\n (\"---\",None),\n (\"&Close\",close_menu),\n ]\n return widgets.Menu('Tools',items=MenuData,parent=GD.gui.menu,before='help')", "def my_theme() -> Dict[str, Any]:\n return {\n \"config\": {\n \"view\": {\"height\": 400, \"width\": 600},\n \"legend\": {\"titleFontSize\": 20, \"labelFontSize\": 16},\n \"axis\": {\"grid\": False, \"labelFontSize\": 16, \"titleFontSize\": 20},\n \"header\": {\"titleFontSize\": 22, \"labelFontSize\": 18},\n \"background\": \"white\",\n }\n }", "def use_my_theme():\n # register and enable the theme\n alt.themes.register(\"my_theme\", my_theme)\n alt.themes.enable(\"my_theme\")", "def use_my_theme():\n # 
register and enable the theme\n alt.themes.register(\"my_theme\", my_theme)\n alt.themes.enable(\"my_theme\")", "def __init__(self: object) -> None:\n super().__init__()\n self.title(\"dnazip\")\n self.configure(bg='#ebebeb')\n self.create_main()\n self.create_menu()\n self.create_buttons()\n self.file = None", "def __init__(self, theme_dir):\n\n if not os.path.isdir(theme_dir): \n print(\"%s not a valid directory, please check!\" % theme_dir, file=sys.stderr)\n sys.exit(1)\n for dirname, dirnames, filenames in os.walk(theme_dir):\n for subdirname in dirnames:\n full_path = os.path.join(dirname, subdirname)\n self.theme_dir.append(full_path)\n print(\"read theme %s\" % full_path, file=sys.stdout)\n print(\"all themes loaded!\", file=sys.stdout)", "def createWindow(self):\r\n\t\t# give the window a title\r\n\t\tself.parent.title( 'Acrobat Data Acquisition')\r\n\t\t# set the style\r\n\t\tself.style = ttk.Style()\r\n\t\tself.style.theme_use('default')\r\n\t\tself.pack(fill= tk.BOTH, expand=1)", "def createMenus(self):\n\n self.fileMenu = QMenu(\"&File\", self)\n self.fileMenu.addAction(self.openAct)\n self.fileMenu.addAction(self.addAct)\n self.fileMenu.addSeparator()\n # self.fileMenu.addAction(self.showSessionAct)\n self.fileMenu.addAction(self.exitAct)\n\n self.helpMenu = QMenu(\"&Help\", self)\n self.helpMenu.addAction(self.aboutAct)\n self.helpMenu.addAction(self.aboutQtAct)\n\n self.viewMenu = QMenu(\"&View\", self)\n\n self.sortMenu = QMenu(\"Sort by\", self.viewMenu, enabled=False)\n self.groupMenu = QMenu(\"Group by\", self.viewMenu, enabled=False)\n\n self.showGroupMenu = QMenu(\"Load Group\", self.fileMenu, enabled=False)\n self.addGroupDataMenu = QMenu('Add Group', self.fileMenu, enabled=False)\n self.fileMenu.addMenu(self.showGroupMenu)\n self.fileMenu.addMenu(self.addGroupDataMenu)\n self.fileMenu.addAction(self.seeAllGroupAct)\n self.viewMenu.addMenu(self.groupMenu)\n self.viewMenu.addMenu(self.sortMenu)\n\n # Add filters to \"Sort by\"\n self.create_sort_menu()\n self.sortMenu.addAction(self.ageSortAct)\n self.sortMenu.addAction(self.sexSortAct)\n self.sortMenu.addAction(self.genotypeSortAct)\n self.sortMenu.addAction(self.speciesSortAct)\n self.sortMenu.addAction(self.subjectIDSortAct)\n self.sortMenu.addAction(self.weightSortAct)\n self.sortMenu.addAction(self.birthSortAct)\n self.sortMenu.addSeparator()\n\n self.sortMenu.addAction(self.fluorescenceSortAct)\n self.sortMenu.addAction(self.imagesegSortAct)\n self.sortMenu.addAction(self.rasterSortAct)\n\n # Add filters to \"Group by\"\n self.create_group_menu()\n self.groupMenu.addAction(self.ageGroupAct)\n self.groupMenu.addAction(self.sexGroupAct)\n self.groupMenu.addAction(self.genotypeGroupAct)\n self.groupMenu.addAction(self.speciesGroupAct)\n self.groupMenu.addAction(self.subjectIDGroupAct)\n self.groupMenu.addAction(self.weightGroupAct)\n self.groupMenu.addAction(self.birthGroupAct)\n\n self.groupMenu.addSeparator()\n\n self.groupMenu.addAction(self.fluorescenceGroupAct)\n self.groupMenu.addAction(self.imagesegGroupAct)\n self.groupMenu.addAction(self.rasterGroupAct)\n\n self.menuBar().addMenu(self.fileMenu)\n self.menuBar().addMenu(self.viewMenu)\n self.menuBar().addMenu(self.helpMenu)", "def default_door():\n X = [0.0, 0.14, 1.12, 1.26]\n Y = [0.0, 0.14, 2.24]\n Z = [-0.14, 0.14]\n V, F = True, False\n occupancy = [\n [[V], [V]],\n [[V], [F]],\n [[V], [V]]\n ]\n return w7.window(X, Y, Z, occupancy)", "def setup_draw(self):\n pass", "def update_drawer_img(self):\n self.drawer = aggdraw.Draw(self.img)\n 
self.drawer.settransform(self.coordspace_transform)", "def makeMenu(self):\n\t\tself.fileMenu = self.menuBar().addMenu(self.tr(\"&Arquivo\"))\n\t\tself.fileMenu.addAction(self.newAct)\n\t\tself.fileMenu.addAction(self.openAct)\n\t\tself.fileMenu.addAction(self.saveAct)\n\t\tself.fileMenu.addAction(self.exportAct)\n\t\tself.fileMenu.addSeparator() \n\t\tself.fileMenu.addAction(self.exitAct)\n\n\t\tself.editMenu = self.menuBar().addMenu(self.tr(\"&Editar\"))\n\t\t\n\t\tself.helpMenu = self.menuBar().addMenu(self.tr(\"&Ajuda\"))\n\t\tself.helpMenu.addAction(self.aboutAct)", "def _add_color_menu(self):\n print 'adding color menu'\n self.menuBar.addcascademenu('Color', 'Color Atoms'); \n c_lambda = lambda: self.color_wireframe('cpk');\n self.menuBar.addmenuitem('Color Atoms','command','Color wireframes cpk', command=c_lambda, label='cpk')\n c_lambda = lambda: self.color_wireframe('type');\n self.menuBar.addmenuitem('Color Atoms','command','Color wireframes by type', command=c_lambda, label='type')\n c_lambda = lambda: self.color_wireframe('chain');\n self.menuBar.addmenuitem('Color Atoms','command','color wireframes by chain', command=c_lambda, label='chain')\n c_lambda = lambda: self.color_wireframe('hydrogen_type');\n self.menuBar.addmenuitem('Color Atoms','command','color wireframes by H type', command=c_lambda, label='H Type')\n \n self.menuBar.addcascademenu('Color', 'Color Trace')\n self.menuBar.addmenuitem('Color Trace','command','Color tubes by secondary', command=self.color_trace_by_secondary,label='secondary')\n self.menuBar.addmenuitem('Color Trace','command','Color tubes by type', command=self.color_tubes_type,label='type')\n self.menuBar.addmenuitem('Color Trace','command','Color tubes by chain', command=self.color_tubes_chain,label='chain')\n\n self.menuBar.addcascademenu('Color', 'Color Volumes')\n self.menuBar.addmenuitem('Color Volumes','command','Color volumes cpk', command=self.color_volumes_cpk,label='cpk')\n self.menuBar.addmenuitem('Color Volumes','command','Color volumes by type', command=self.color_volumes_type,label='type')\n self.menuBar.addmenuitem('Color Volumes','command','Color volumes by chain', command=self.color_volumes_chain,label='chain')\n\n # create menu items for .features keys for atoms and residues\n if self.system != 'None' and self.system != None:\n key_store = {}\n key_store['atom'] = self.system.ProteinList[0].atoms[0].features.keys()\n key_store['residue'] = self.system.ProteinList[0].residues[0].features.keys()\n for run_type in ['atom', 'residue']:\n broken = 0\n for key in key_store[run_type]:\n for pol in self.system.ProteinList:\n if key == 'domain':\n self.print_domain_info(pol)\n normalized = 1\n # if the feature includes non-digits, pass. 
if it is all digits, see if \n # it is normalized\n if run_type == 'atom':\n item_list = pol.atoms\n elif run_type == 'residue':\n item_list = pol.residues\n same_val_count = 0\n try:\n item_list[0].features[key]\n except KeyError:\n continue\n else:\n first_val = item_list[0].features[key]\n for item in item_list:\n try:\n feature = item.features[key]\n except KeyError:\n print 'key error on %s, breaking'%(key)\n broken = 1\n break\n try:\n int(feature)\n except ValueError:\n print '%s not digit, breaking'%(feature)\n broken = 1\n break\n else:\n if feature != -1 and (feature < 0.0 or feature > 1.0):\n normalized = 0\n if feature == first_val:\n same_val_count += 1\n if same_val_count == len(item_list):\n print '%s all the same value; breaking'%(key)\n broken = 1\n break\n if key == 'domain':\n if item.features[key] == 0.0:\n item.features[key] = -1\n else:\n # if not normalized, make a new key called key+'_normalized', and swap the old\n # key with the new key to color by it\n old_key = copy.copy(key)\n if not normalized and (key+'_normalized' not in item.features.keys()):\n min_f = 1000000\n max_f = -1000000\n for item2 in item_list:\n feature = item2.features[key]\n if feature != -1:\n if feature < min_f:\n min_f = feature\n if feature > max_f:\n max_f = feature\n key = key + '_normalized'\n for item2 in item_list:\n if item2.features[old_key] != -1.0:\n d = (item2.features[old_key]-min_f) / (max_f-min_f+0.0)\n item2.features[key] = d\n else:\n item2.features[key] = -1.0\n if run_type == 'residue':\n c_lambda1 = lambda p=pol, k=key: self.color_trace_by_residue_feature(p, k)\n self.menuBar.addmenuitem('Color Trace','command','Color trace by res '+key, command=c_lambda1, label='%s %s'%(pol.chain_name, key))\n c_lambda2 = lambda p=pol, k=key: self.color_volume_by_residue_feature(p, k)\n self.menuBar.addmenuitem('Color Volumes','command','Color volumes by res '+key, command=c_lambda2, label='%s %s'%(pol.chain_name, key))\n c_lambda3 = lambda p=pol, k=key: self.color_atoms_by_residue_feature(p, k)\n self.menuBar.addmenuitem('Color Atoms','command','Color atoms by res '+key, command=c_lambda3, label='%s %s'%(pol.chain_name, key))\n elif run_type == 'atom':\n c_lambda1 = lambda p=pol, k=key: self.color_trace_by_atom_feature(p, k)\n self.menuBar.addmenuitem('Color Trace','command','Color trace by atom '+key, command=c_lambda1, label='%s %s'%(pol.chain_name, key))\n c_lambda2 = lambda p=pol, k=key: self.color_volume_by_atom_feature(p, k)\n self.menuBar.addmenuitem('Color Volumes','command','Color volumes by atom '+key, command=c_lambda2, label='%s %s'%(pol.chain_name, key))\n c_lambda3 = lambda p=pol, k=key: self.color_atoms_by_atom_feature(p, k)\n self.menuBar.addmenuitem('Color Atoms','command','Color atoms by atom '+key, command=c_lambda3, label='%s %s'%(pol.chain_name, key))\n key = old_key\n #broken = 1\n #break\n if broken:\n break", "def __init__(self):\r\n super().__init__()\r\n self._setupSideMenu()", "def toggle_mode(mode: ThemeMode):\n with winreg.OpenKey(\n winreg.HKEY_CURRENT_USER,\n (r'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Themes'\n r'\\Personalize'),\n access=winreg.KEY_ALL_ACCESS) as hkey:\n # Change mode for both apps and system theme to sync mode.\n winreg.SetValueEx(\n hkey, 'AppsUseLightTheme', 0, winreg.REG_DWORD, mode.value)\n winreg.SetValueEx(\n hkey, 'SystemUsesLightTheme', 0, winreg.REG_DWORD, mode.value)", "def makeActionMenu(self):\n\t\tself.newAct = 
QtGui.QAction(self.tr(\"&Novo\"),self)\n\t\tself.newAct.setShortcut(self.tr(\"Ctrl+N\"))\n\t\tself.newAct.setStatusTip(self.tr(\"Cria uma nova area de desenho em branco\"))\n\t\tself.connect(self.newAct,SIGNAL(\"triggered()\"),self.glwidget.newFile)\n\t\t\n\t\tself.openAct = QtGui.QAction(self.tr(\"&Abrir\"),self)\n\t\tself.openAct.setShortcut(self.tr(\"Ctrl+o\"))\n\t\tself.openAct.setStatusTip(self.tr(\"Abrir arquivo do elvis\"))\n\t\tself.connect(self.openAct,SIGNAL(\"triggered()\"),self.glwidget.openElvisfile)\t\t\n\n\t\tself.saveAct = QtGui.QAction(self.tr(\"&Salvar\"),self)\n\t\tself.saveAct.setShortcut(self.tr(\"Ctrl+S\"))\n\t\tself.saveAct.setStatusTip(self.tr(\"Salva a imagem do canvas\"))\n\t\tself.connect(self.saveAct,SIGNAL(\"triggered()\"),self.glwidget.saveElvisfile)\n\t\t\n\t\tself.exportAct = QtGui.QAction(self.tr(\"&Exportar SVG\"),self)\n\t\tself.exportAct.setShortcut(self.tr(\"Ctrl+E\"))\n\t\tself.exportAct.setStatusTip(self.tr(\"Exporta para formato SVG\"))\n\t\tself.connect(self.exportAct,SIGNAL(\"triggered()\"),self.glwidget.ExportSVG)\n\t\t\t\t\n\t\t\n\t\tself.exitAct = QtGui.QAction(self.tr(\"&Sair\"),self)\n\t\tself.exitAct.setStatusTip(self.tr(\"Sair do programa\"))\n\t\tself.connect(self.exitAct,SIGNAL(\"triggered()\"),self.close)\n\t\t\n\t\n\t\tself.aboutAct = QtGui.QAction(self.tr(\"&Sobre\"),self)\n\t\tself.aboutAct.setStatusTip(self.tr(\"Sobre o programa\"))\n\t\tself.connect(self.aboutAct,SIGNAL(\"triggered()\"),self.about)", "def _create(self):\n if self.h >= 2:\n # Draw standard shape\n for i in range(1, self.h - 1):\n self.window.addch(i, 0, curses.ACS_VLINE | self.colour) # '|'\n\n # Draw scrolling bar if necessary\n if self.size > 0:\n end = min(self.pos + self.size, self.h)\n for i in range(self.pos, end):\n self.window.addch(i, 0, chr(0x2588), self.colour) # '█'\n\n # Draw arrows if necessary\n if self.counter > 0:\n self.window.addch(0, 0, chr(0x25B2), self.colour) # '▲'\n if self.counter < self.content_size - self.h:\n self.window.addch(self.h - 1, 0, chr(0x25BC), self.colour) # '▼'\n\n # Finally refresh window\n self.window.refresh()", "def set_treeview_style(tv: ttk.Treeview, main_folder_name: str):\n # tv.tag_configure(\"same\", background=\"#99B898\", foreground=\"white\")\n tv.tag_configure(\"lo\", background=\"yellow\", foreground=\"black\")\n tv.tag_configure(\"ro\", background=\"yellow\", foreground=\"black\")\n tv.tag_configure(\"dif\", background=\"red\", foreground=\"black\")\n tv.tag_configure(\"funny\", background=\"#2A363B\", foreground=\"white\")\n # tv.tag_configure(\"sub\", background=\"#2A363B\", foreground=\"white\")\n tv.heading(\"#0\", text=main_folder_name)\n style = ttk.Style()\n style.configure(\"Treeview.Heading\", font=(None, 16))", "def start():\r\n window = loop_menuDolar()\r\n window.close()", "def switch_style(self):\n self.game_data.set_style(next_style[self.game_data.get_style()])\n self.settings_buttons[0].set_text(get_style_name(self.game_data.get_style()))", "def create(self):\n # Positioning background and pointer indicator for main menu\n self.surface.blit(self.main_menu_background, (0, 0))\n self.surface.blit(self.main_menu_greets, self.main_menu_greets_position)\n self.show_mouse_position_with_px()\n self.main_menu_buttons()", "def OnSwitchTab(self, event):\n canvas_HDV = self.dicom_navigation.parent.dicom_right_window.top_info.canvas_HDV\n canvas_dicom = self.dicom_navigation.parent.dicom_right_window.top_info.canvas_dicom\n \n if (self.dicom_navigation.display_settings['miniature'] == 1):\n 
canvas_HDV.get_tk_widget().pack_forget()\n canvas_dicom.get_tk_widget().pack(side=tk.RIGHT, fill=tk.Y, expand=False)\n # Tricky hack pour ne pas avoir le probleme de zoom lorsqu'on met les mignatures (on retrace les canvas initiaux)\n self.dicom_navigation.parent.dicom_right_window.dicom_hdv.canvas.get_tk_widget().update_idletasks()\n self.dicom_navigation.parent.dicom_right_window.dicom_view.canvas.get_tk_widget().update_idletasks()", "def menuBar(self):\n\n\t\tmenu = Menu(self.master)\n\t\tself.master.config(menu=menu)\n\n\t\t#File Menu\n\t\tfyle = Menu(menu)\n\t\tfyle.add_command(label='New',command=self.newPad)\n\t\tfyle.add_command(label='Open File',command=self.open_file)\n\t\tfyle.add_command(label='Save', command=self.saveFile)\n\t\tfyle.add_command(label='Save As',command=self.saveFileAs)\n\t\tfyle.add_command(label='Exit', command=outer.destroy)\n\t\tmenu.add_cascade(label='File',menu=fyle)\n\n\t\t#Edit Menu\n\t\tedit = Menu(menu)\n\t\tedit.add_command(label='Cut')\n\t\tedit.add_command(label='Copy')\n\t\tedit.add_command(label='Paste')\n\t\tedit.add_command(label='Undo')\n\t\tedit.add_command(label='Redo')\n\t\tmenu.add_cascade(label='Edit',menu=edit)\n\n\t\t#View Menu\n\t\tview = Menu(menu)\n\t\tview.add_command(label='Line Numbers')\n\t\tmenu.add_cascade(label='View', menu=view)\n\n\n\t\t#Help Menu\n\t\thelp = Menu(menu)\n\t\thelp.add_command(label='About')\n\t\tmenu.add_cascade(label='Help',menu=help)", "def build_assets(self):\n theme = self.theme\n \n # ~ self.assets_dir = cwd + \"/CenterSide_Themes/\" + theme + \"/\"\n \n \n \n \n \n \n # ~ self.blank_langmssg = QPixmap(\"blank_langmssg.svg\")\n # ~ self.blank_thememssg = QPixmap(\"blank_thememssg.svg\")\n \n \n \n \n \n # ~ self.icon_info = QIcon(\"Icons/info.svg\")\n # ~ self.icon_intructions = QIcon(\"Icons/instructions.svg\")\n # ~ self.icon_internet = QIcon(\"Icons/internet.svg\")\n # ~ self.icon_invite = QIcon(\"Icons/invite.svg\")\n # ~ self.icon_languages = QIcon(\"Icons/languages.svg\")\n # ~ self.icon_local = QIcon(\"Icons/local.svg\")\n # ~ self.icon_message = QIcon(\"Icons/message.svg\")\n # ~ self.icon_name = QIcon(\"Icons/name.svg\")\n # ~ self.icon_options = QIcon(\"Icons/options.svg\")\n # ~ self.icon_palettes = QIcon(\"Icons/palettes.svg\")\n \n # ~ self.icon_quit = QIcon(\"Icons/quit.svg\")\n # ~ self.icon_refresh = QIcon(\"Icons/refresh.svg\")\n # ~ self.icon_shop = QIcon(\"Icons/shop.svg\")\n # ~ self.icon_soundon = QIcon(\"Icons/soundon.svg\")\n # ~ self.icon_soundoff = QIcon(\"Icons/soundoff.svg\")\n # ~ self.icon_vsAI = QIcon(\"Icons/vsAI.svg\")", "def _create_menu(self):\n menubar = Menu(self.root)\n # Game dropdown menu\n gamemenu = Menu(menubar, tearoff=0)\n gamemenu.add_command(\n label='New Game', command=self.new_game)\n gamemenu.add_separator()\n gamemenu.add_command(label='Exit', command=self.root.quit)\n menubar.add_cascade(label='Game', menu=gamemenu)\n\n # Options dropdown menu\n optionmenu = Menu(menubar, tearoff=0)\n optionmenu.add_command(label='Scattershot', command=self.game.spray)\n optionmenu.add_command(label='All Ships Attack', command=None)\n menubar.add_cascade(label='Options', menu=optionmenu)\n\n # Help dropdown menu\n helpmenu = Menu(menubar, tearoff=0)\n helpmenu.add_command(label='Help Index', command=None)\n helpmenu.add_command(label='About...', command=None)\n menubar.add_cascade(label='Help', menu=helpmenu)\n\n self.root.config(menu=menubar)", "def Crearmenu():\n layoutM = [\n [sg.T(\"ScrabbleAR\", size=(16, 1), justification=\"center\",\n font=(\"Times New Roman\", 
25))],\n [sg.T(\" Bienvenido a ScrabbleAR!, el juego donde \")],\n [sg.T(\" hay que armar palabras para ganar \")],\n [sg.B(\"Iniciar nuevo juego\", size=(17, 1), key=\"inicio\"),\n sg.B(\"Configuracion\", size=(17, 1), key=\"config\")],\n [sg.B(\"Puntuaciones\", size=(17, 1), key=\"puntos\"),\n sg.B(\"Salir\", size=(17, 1), key=\"exit\")]\n ]\n\n if(os.path.isfile(\"Guardado.json\")):\n layoutM += [[sg.B(\"Continuar partida\", size=(36, 1), key=\"continue\")]]\n\n window = sg.Window(\"ScrabbleAR - Menu\", layoutM)\n\n return window", "def paintScreen(self):\n imgPath = GG.genteguada.GenteGuada.getInstance().getDataPath(BACKGROUND_LEFT)\n self.imgBackgroundLeft = guiobjects.OcempImageMapTransparent(imgPath)\n self.window.add_child(self.imgBackgroundLeft)\n imgPath = GG.genteguada.GenteGuada.getInstance().getDataPath(BACKGROUND_RIGHT)\n imgBackgroundRight = guiobjects.OcempImageMapTransparent(imgPath)\n imgBackgroundRight.topleft = 297, 0\n self.window.add_child(imgBackgroundRight)", "def setup(self):\n self.ui_manager.purge_ui_elements()\n\n button = buttons.MenuButton(\n 'Menu',\n center_x=self.window.width // 2,\n center_y=self.window.height // 9,\n width=200,\n user=self.user.text\n )\n button.set_style_attrs(\n bg_color_hover=(159, 255, 233),\n bg_color_press=(51, 201, 166),\n )\n self.ui_manager.add_ui_element(button)", "def extend_ui(self):\n for name, tab in self.build_general_tabs().items():\n scroll = self.build_tab()\n self.add_tab(\"General\", name, scroll)\n self.fill_tab(\"General\", name, tab)\n for name, tab in self.build_display_tabs().items():\n scroll = self.build_tab()\n self.add_tab(\"Display\", name, scroll)\n self.fill_tab(\"Display\", name, tab)\n for name, tab in self.build_data_tabs().items():\n scroll = self.build_tab()\n self.add_tab(\"Data\", name, scroll)\n self.fill_tab(\"Data\", name, tab)", "def on_category(self):\n super(ToolSettings, self).on_category()\n selItems = self.tw_category.selectedItems() or []\n #--- Build Tree ---#\n if selItems:\n if hasattr(selItems[0], 'itemWidget'):\n if selItems[0].itemWidget is not None:\n if not selItems[0].itemWidget.__edited__:\n selItems[0].itemWidget._initWidget()\n selItems[0].itemWidget.buildTree()" ]
[ "0.5896766", "0.56492335", "0.5608493", "0.5590127", "0.55801094", "0.54546154", "0.54343575", "0.5407063", "0.540511", "0.5388838", "0.5362931", "0.5360915", "0.53588575", "0.5346011", "0.5341834", "0.5332074", "0.5331664", "0.53218496", "0.52839094", "0.52213657", "0.5196463", "0.51943153", "0.5191199", "0.5189978", "0.5167998", "0.5164351", "0.5164351", "0.5158457", "0.5145621", "0.51405257", "0.51400214", "0.51390845", "0.51256496", "0.5120095", "0.50976187", "0.50921655", "0.5089395", "0.5085907", "0.5067781", "0.50582075", "0.5054446", "0.5035085", "0.49989817", "0.49914172", "0.49910626", "0.49831098", "0.497907", "0.49670935", "0.495886", "0.49450347", "0.49344885", "0.4925238", "0.49177283", "0.49118692", "0.49037665", "0.49032027", "0.4895658", "0.48820254", "0.487202", "0.4870377", "0.4865319", "0.48631302", "0.48616984", "0.48530897", "0.48486018", "0.48483914", "0.48382607", "0.48375294", "0.48370194", "0.48368037", "0.48233208", "0.48218971", "0.48173803", "0.48173803", "0.48046237", "0.48037177", "0.480144", "0.47986487", "0.47910178", "0.47901812", "0.47839686", "0.47837704", "0.47818887", "0.47676072", "0.47667658", "0.47493795", "0.47439742", "0.47419357", "0.47394046", "0.4738153", "0.4722905", "0.47207293", "0.47157654", "0.47154355", "0.47145966", "0.47114706", "0.4700201", "0.46958894", "0.46940228", "0.46919787" ]
0.75262403
0
draws all of the children
def draw(self,children):
    self.clip = [(0,0,gui._width+100, gui._height+100)]
    self.drawer.setClip(0,0,gui._width+100, gui._height+100)
    self.drawer.begin()
    z = 0
    for child in reversed(children):
        z += 1
        self.drawChild(0,0,z,child)
    self.drawer.end()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def draw(self, force=False):\n for child in self.children.values():\n child.draw(force)", "def draw(self):\n for obj in self.objects:\n obj.draw()", "def draw(self):\n for tree_idx, tree in enumerate(self.trees):\n print(\"==========================================\\nTree\",\n tree_idx)\n self._print_tree(tree)", "def draw(self):\n pos = Point([2,2])\n\n if self.classes == None:\n classes = self.lumpy.get_class_list()\n else:\n classes = [make_thing(self.lumpy, cls) for cls in self.classes]\n\n # find the classes that have no parents, and find the\n # height of each tree\n roots = [c for c in classes if c.parents == []]\n for root in roots:\n root.set_height()\n\n # for all the leaf nodes, compute the distance to\n # the parent\n leafs = [c for c in classes if c.childs == []]\n for leaf in leafs:\n leaf.set_depth()\n\n # if we're drawing all the classes, start with the roots;\n # otherwise draw the classes we were given.\n if self.classes == None:\n drawn = self.draw_classes(roots, pos)\n else:\n drawn = self.draw_classes(classes, pos)\n \n self.draw_arrows()\n\n # configure the scroll region\n bbox = Canvas.bbox(self.canvas, ALL)\n self.canvas.configure(scrollregion=bbox)", "def draw(self):\n\n for item in self.vis:\n item.undraw()\n self.render()\n for item in self.vis:\n item.draw(self.win)\n self.drawn = True", "def on_draw(self):\n # draw everything", "def draw(self):\n for section in self.sections:\n canvas_reset(self.canvas)\n section.draw(self.canvas)", "def draw(self): \n [component.draw() for component in self.components]", "def draw(self): \n [component.draw() for component in self.components]", "def draw(self): \n [component.draw() for component in self.components]", "def draw(self): \n [component.draw() for component in self.components]", "def draw(self): \n [component.draw() for component in self.components]", "def draw(self): \n [component.draw() for component in self.components]", "def draw(self): \n [component.draw() for component in self.components]", "def draw(self): \n [component.draw() for component in self.components]", "def draw(self, projection, view, model):\n for child in self.children:\n child.draw(projection, view, model @ self.transform)", "def drawAll(self):\r\n for x in range(len(self.model)):\r\n self.model[x].draw()", "def draw(self, parent, cr):\n for x, y in self.get_block_coords():\n parent.draw_block_element(cr, x, y)", "def drawtree(self):\r\n\r\n Phylo.draw(self.tree)", "def drawChild(self,x,y,z,thing):\n self.z = z\n \n \n \n if not thing.visable:\n return \n \n \n self.color = Vec4(*thing.color)\n \n realX = x+float(thing._x)\n realY = y+float(thing._y)\n \n if thing.style:\n style = gui.theme.define(thing.style)\n if style:\n style.draw(\n self,\n (realX,realY),\n (float(thing._width),float(thing._height)))\n \n if thing.clips:\n # set clip stuff\n self.pushClip(realX,realY,realX+thing._width,realY+thing._height)\n \n if thing.icon:\n rect = self.atlas.getRect(thing.icon)\n if rect: \n self.color = thing.color\n u,v,us,vs = rect\n self.rectStreatch((realX,realY,us,vs),(u,v,us,vs))\n \n if thing.text:\n # draw text stuff\n if thing.editsText:\n self.drawEditText(\n gui.theme.defineFont(thing.font),\n thing.text,\n realX,\n realY,\n thing.selection,\n thing.caret)\n else:\n self.drawText(\n gui.theme.defineFont(thing.font),\n thing.text,\n realX,\n realY)\n \n \n if thing.children:\n for child in thing.children:\n z += 1\n self.drawChild(realX,realY,z,child)\n \n if thing.clips:\n self.popClip()", "def drawall(self):\r\n for x in 
self.objectlist:\r\n if x.model:\r\n x.model.draw()", "def getChildren():", "def draw(self):\n self._group.set_state_recursive()\n self._vertex_list.draw(self._draw_mode)\n self._group.unset_state_recursive()", "def draw(self):\n pass", "def draw(self):\n pass", "def draw(self):\n pass", "def draw(self):\n pass", "def draw_tree(self):\n\n print \"--- \" + str(self.name)\n \n def draw_child_tree(current, depth):\n \n for c in current.children:\n print depth * \" \" + \"|-- \" + str(c.name)\n if hasattr(c, 'children'):\n draw_child_tree(c, depth + 1)\n \n draw_child_tree(self, 1)\n \n return", "def draw(self):", "def draw(self):\n draw(self.graph)", "def on_draw( self ):\r\n self.clear()\r\n self.setup_3D()\r\n print \"DEBUG:\" , \"There are\" , len( self.renderlist ) , \"items in 'self.renderlist'\"\r\n for obj in self.renderlist:\r\n obj.draw()", "def draw(self, parent, cr):\n for y, row in enumerate(self.matrix):\n for x, cell in enumerate(row):\n if cell:\n parent.draw_block_element(cr, x, y)", "def _drawFrame(self):\n\n self._clearScreen()\n \n for object in Object.Objects:\n self._drawObject(object)\n\n for entity in Entity.Entities:\n self._drawObject(entity)\n\n self._drawObject(Game.Player)", "def on_draw(self):\n\n # clear the screen to begin drawing\n arcade.start_render()\n\n # TODO: draw each object\n self.ship.draw()\n for asteroid in self.asteroid_array:\n asteroid.draw()\n\n for bullet in self.bullets_list:\n bullet.draw()", "def draw():", "def draw(self):\n\t\tpass", "def draw(self):\n self._view.clear()\n for h in range(len(self._bricks)):\n self._bricks[h].draw(self._view)\n \n self._paddle.draw(self._view)\n \n for w in self._FP_list:\n w.draw(self._view)\n \n # draw ball if not None\n if not self._ball is None:\n self._ball.draw(self._view)", "def Rec_Draw_Tree( self, cur_node, xs, ys ):\r\n yhold = [] #holds the y values of the children\r\n ypos1 = 0 #the yvalue of the current node\r\n ypos = 0\r\n new_xstart = cur_node.data.length * cb.xtick + xs\r\n #for each child of the current node\r\n for i in range( len( cur_node.sub ) ):\r\n #current node is to be drawn before the (cb.order)-th child\r\n if ( i == cb.order ):\r\n ypos1 = self.Draw_Node( cur_node, xs, ys )\r\n if( cb.order == 1 ):\r\n ypos1 = ys\r\n ys = ypos1 + cb.ytick\r\n if( len( cur_node.sub[i].sub ) == 0 ):#Draw a leaf\r\n ypos = self.Draw_Node( cur_node.sub[i], new_xstart, ys )\r\n yhold.append( int(ypos) )\r\n else: #Draw an internal node\r\n ys, ypos = self.Rec_Draw_Tree( cur_node.sub[i], new_xstart, ys )\r\n yhold.append( ypos )\r\n if( i < len( cur_node.sub ) - 1 ):\r\n ys = ys + cb.ytick\r\n if ( cb.order != 1 and cb.order == len( cur_node.sub ) ):\r\n ypos1 = self.Draw_Node( cur_node, xs, ys )\r\n elif( cb.order == 1 and cb.order == len( cur_node.sub) ):\r\n ypos1 = self.Draw_Node( cur_node, xs , ys+cb.ytick )\r\n ypos1 = ypos1 - cb.ytick\r\n\r\n #draw the vertical lines to the children\r\n for item in yhold:\r\n self.canvas_one.create_line( new_xstart, item, new_xstart, ypos1, width = 3, fill=self.branch_color )\r\n #return the farthest vertical position drawn and the position of the line of the current segment\r\n return ys, ypos1", "def draw(self):\n\t\tfor i in range(0, self.size):\n\t\t\tprint('\\n' + \"----\" * self.size)\n\t\t\tfor j in range(0, self.size):\n\t\t\t\tprint(self.grid[i][j] + ' |', end=\" \")\n\t\tprint('\\n'+ \"----\" * self.size + '\\n')", "def draw( self ):\n\n if self.__drawnGrid == 0:\n draw_grid().draw()\n\n self.__drawnGrid = 1\n\n column = 0\n row = 0\n i = 0\n for mark 
in self.__grid:\n if row == 0:\n turtle.goto(-60+60*column, 60)\n elif row == 1:\n turtle.goto(-60+60*column, 0)\n elif row == 2:\n turtle.goto(-60+60*column, -60)\n\n if isinstance(mark, str):\n if mark.lower() == 'x': \n drawX(i)\n elif mark.lower() == 'o':\n drawO(i)\n\n column += 1\n\n if column == 3:\n column = 0\n row += 1\n\n i+=1\n\n turtle.goto(-60, 60)", "def draw(self):\n if self.state == 'alive':\n for i in range(len(self.tail)):\n pygame.draw.rect(display, black, (squareToXPix(self.tail[-(i + 1)][0], objectSize), squareToYPix(self.tail[-(i + 1)][1], objectSize), objectSize, objectSize))\n\n pygame.draw.rect(display, black, (squareToXPix(self.x, objectSize), squareToYPix(self.y, objectSize), objectSize, objectSize))\n\n else:\n for i in range(len(self.tail)):\n pygame.draw.rect(display, red, (squareToXPix(self.tail[-(i + 1)][0], objectSize), squareToYPix(self.tail[-(i + 1)][1], objectSize), objectSize, objectSize))\n\n pygame.draw.rect(display, red, (squareToXPix(self.x, objectSize), squareToYPix(self.y, objectSize), objectSize, objectSize))", "def draw(self):\n \n # Draw the background\n self.world.fill(BLUE)\n \n # Draw all the sprite lists that we have\n self.wall_list.draw(self.world)\n self.enemy_list.draw(self.world)\n self.sludge.draw(self.world)\n self.consumeable.draw(self.world)\n self.can_climb.draw(self.world)", "def draw(self, diag, pos, flip, tags=tuple()):\n if self.isdrawn():\n return []\n\n self.drawn = True\n self.diag = diag\n self.canvas = diag.canvas\n\n # keep track of how many things have been drawn.\n # Simple values can get drawn more than once, so the\n # total number of things drawn can be greater than\n # the number of things.\n Thing.things_drawn += 1\n if Thing.things_drawn % 100 == 0:\n print Thing.things_drawn\n #self.diag.lumpy.update()\n\n # each thing has a list of tags: its own tag plus\n # the tag of each thing it belongs to. 
This convention\n # makes it possible to move entire structures with one\n # move command.\n self.tags = make_tags(self.__class__.__name__)\n tags += self.tags\n\n # invoke drawme in the child class\n drawn = self.drawme(diag, pos, flip, tags)\n if drawn == None:\n drawn = [self]\n \n self.set_offset(pos)\n return drawn", "def on_draw(self):\n\n # Clear the screen and start drawing\n arcade.start_render()\n\n # Draw the rectangles\n for shape in self.shapes:\n shape.draw()", "def _draw_players(self):\n for player in self.players:\n player.draw()", "def on_draw(self):\n\n # clear the screen to begin drawing\n arcade.start_render()\n\n # draw each object\n self.ball.draw()\n self.paddle.draw()\n\n self.draw_score()", "def drawBoard(self):\r\n self.outer.draw(self.surface)\r\n self.background.draw(self.surface)\r\n for point in self.points:\r\n point.draw(self.surface)\r\n point.drawCheckers(self.surface)\r\n self.dice.draw(self.surface)\r\n self.message.draw(self.surface)\r\n self.checkerBox.draw(self.surface)\r\n self.checkerBox.drawCheckers(self.surface)\r\n for bar in self.bar:\r\n bar.draw(self.surface)\r\n bar.drawCheckers(self.surface)\r\n pygame.display.flip()", "def draw_particles(self):\n for particle in self.particles:\n particle.draw()", "def draw(self, surface):\n for molecule in self.molecules:\n molecule.draw(surface)", "def draw(self):\n self.screen.fill(WHITE)\n self.color_invalid()\n self.draw_selected()\n self.shade_locked_cells()\n self.draw_grid()\n self.draw_buttons()\n self.draw_numbers()", "def draw(self, surface):\n for box in self.checkboxes:\n box.draw(surface)", "def draw_nodes(self):\n pass", "def draw(self, **kwargs):\n for o in sorted(self._drawables, key=default_itemgetter(\"z\", default=0)):\n o.draw(**kwargs)", "def draw(self):\n for x in range(self.numRows):\n print self.grid[x]", "def draw(self):\n self.drawLine()\n\n for l in range(0, self.height):\n print(\"|\", end='', flush=True)\n for c in range(0, self.width):\n print(\" \" + str(self.grid[l][c]) + \" |\", end='', flush=True)\n print(\"\\n\", end='', flush=True)\n\n self.drawLine()", "def on_draw(self):\r\n\r\n \r\n # clear the screen to begin drawing\r\n arcade.start_render()\r\n\r\n background = arcade.load_texture(\"gala.png\")\r\n arcade.draw_texture_rectangle(SCREEN_WIDTH/2, SCREEN_HEIGHT/2,SCREEN_WIDTH , SCREEN_HEIGHT, background) \r\n \r\n\r\n for asteriod in self.rocks:\r\n asteriod.draw()\r\n \r\n # for asteriod in self.rockss:\r\n # asteriod.draw()\r\n\r\n # for asteriod in self.rocksss:\r\n # asteriod.draw() \r\n \r\n for bullet in self.bullets:\r\n bullet.draw()\r\n \r\n \r\n self.ship.draw()\r\n \r\n \r\n # TODO: draw each object\r", "def draw(self):\n\n self.squares.draw(self.screen)\n if not self.hide_grid:\n self.draw_grid()\n self.fleas.draw(self.screen)\n pygame.display.flip()", "def draw(self, window):\n super().draw(window)\n self.health_bar(window)\n for bullet in self.bullets:\n bullet.draw(window)", "def draw(self):\n self.batch.draw()", "def on_draw(self):\n\t\tself.render()", "def on_draw(self):\n\t\tself.render()", "def paint(self):\r\n self.canvas.delete(tkinter.ALL)\r\n self.visit(self.tree.root)", "def draw(self):\n #for (x, y) in self.coords:\n # pyxel.rect(\n # (x + self.x) * 4,\n # (y + self.y) * 4,\n # (x + self.x) * 4 + 3,\n # (y + self.y) * 4 + 3,\n # self.color)", "def on_draw(self):\r\n\r\n # clear the screen to begin drawing\r\n arcade.start_render()\r\n\r\n # draw each object\r\n self.rifle.draw()\r\n\r\n for bullet in self.bullets:\r\n bullet.draw()\r\n\r\n # 
TODO: iterate through your targets and draw them...\r\n for target in self.targets:\r\n target.draw()\r\n\r\n self.draw_score()\r\n # if the score is less than -30, game over!\r\n if self.score <= -30:\r\n self.draw_game_over()\r\n arcade.finish_render()", "def redraw(self):\n offset = self.actual_row * self.row_size\n x = 5\n y = 5\n ind = 0\n self.scene.clear()\n for _ in range(self.column_size):\n for _ in range(self.row_size):\n if ind+offset < len(self.cards):\n self.draw_card(x, y, ind+offset)\n x += 90\n ind += 1\n x = 5\n y += 120", "def draw_buttons(self):\n for button in self.playing_buttons:\n button.draw(self.screen)", "def draw(self, screen):\n for branch_points in self.branches:\n pygame.draw.polygon(screen, self.branch_color, branch_points)\n for bottom_points in self.bottom:\n pygame.draw.polygon(screen, self.bottom_color, bottom_points)", "def draw(self):\n if self.dirty:\n self._render()\n for text in self.text_lines:\n text.draw()", "def on_draw(self):\n self.clear()\n self.manager.draw()", "def draw(self, view):\n for r in self._aliens:\n for alien in r:\n if alien != None:\n alien.draw(view)\n if self._ship != None:\n self._ship.draw(view)\n self._dline.draw(view)\n for bolt in self._bolts:\n bolt.draw(view)", "def draw(self):\n self.screen.fill((0,51,102))\n # get the new drawables\n self.drawables = (self.game_model.get_background_drawables()\n + self.game_model.get_plane_drawables()\n + self.game_model.get_bullet_drawables()\n + self.game_model.get_enemy_drawables())\n for d in self.drawables:\n rect = d.get_rect()\n surf = d.get_surface()\n surf.set_colorkey((255,255,255))\n self.screen.blit(surf, rect)", "def _drawOnCanvas(self):\n self.canvas=np.ones(self.canvas.shape,dtype=np.uint8)*255\n for key in self.elements:\n graphElement=self.elements[key]\n graphElement.draw(self.canvas)\n self.sync=True", "def draw(self):\r\n if len(self.matchPileManager.piles) > 0:\r\n print \"Match Piles\\r\"\r\n for matchPile in self.matchPileManager.piles:\r\n cardListView = CardListView(matchPile.cards)\r\n print \"{0}\\r\".format(cardListView.draw())", "def draw(self, view):\n for i in self.get_aliens():\n for n in i:\n if not n is None:\n n.draw(view)\n self.get_ship().draw(view)\n self.get_ship().get_linevalue().draw(view)\n for i in self.get_PU():\n i.draw(view)\n for i in self.get_bolts():\n i.draw(view)", "def draw(self):\n\t\tself.screen.fill(pygame.Color('black'))\n\t\tfor column in self.model.blocks:\n\t\t\tfor block in column:\n\t\t\t\tr = pygame.Rect(block.left,\n\t\t\t\t\t\t\t\tblock.top,\n\t\t\t\t\t\t\t\tblock.size,\n\t\t\t\t\t\t\t\tblock.size)\n\t\t\t\tpygame.draw.rect(self.screen, block.color,r)\n\t\tpygame.display.update()", "def draw_buildings(self):\n # for each building in list1 draw it on the screen", "def draw(self):\r\n self.screen.fill(self.color_bg) \r\n for t in self.thumbs: t.draw(self.screen) \r\n pygame.display.flip()\r\n self.clock.tick(60)", "def draw_children(self, node, content):\r\n\r\n # If the node has a left child..\r\n if node.leftchild:\r\n # Print key of node.object, followed by an arrow, followed by the key of leftchild.object, followed by a nl.\r\n content += \"\\\"\" + str(node.object.key) + \"\\\" -> \\\"\" + str(node.leftchild.object.key) + \"\\\";\"\r\n content += \"\\n\"\r\n # If this node has children, call recursively\r\n if node.leftchild.leftchild or node.leftchild.rightchild:\r\n content = self.draw_children(node.leftchild, content, )\r\n\r\n # Right child works the same\r\n if node.rightchild:\r\n content += \"\\\"\" + 
str(node.object.key) + \"\\\" -> \\\"\" + str(node.rightchild.object.key) + \"\\\";\"\r\n content += \"\\n\"\r\n if node.rightchild.leftchild or node.rightchild.rightchild:\r\n content = self.draw_children(node.rightchild, content, )\r\n return content", "def redrawAll(self):\n self.canvas.delete(ALL)\n self.gridBG = []\n self.gridBusy = []\n for row in range(self.rows):\n self.gridBG.append([])\n self.gridBusy.append([])\n for col in range(self.cols):\n self.gridBG[row].append(self.drawCell(row, col, self.colors['idle'], bgPattern=\"gray75\"))\n self.gridBusy[row].append(0)\n for row in range(self.rows + 1):\n self.canvas.create_line(\n self.margin,\n self.margin + row * self.cellSize,\n self.margin + self.cols * self.cellSize,\n self.margin + row * self.cellSize,\n dash=(self.dashBlack, self.dashWhite)\n )\n for col in range(self.cols + 1):\n self.canvas.create_line(\n self.margin + col * self.cellSize,\n self.margin,\n self.margin + col * self.cellSize,\n self.margin + self.rows * self.cellSize,\n dash=(self.dashBlack, self.dashWhite)\n )", "def draw(self):\n self.screen.fill(BACKGROUND_COLOR)\n self.cannon.draw(self.screen)\n self.objects.draw(self.screen)", "def draw(self):\n self._vertex_list.draw(self._draw_mode)", "def renderall(self):\n\n if not self.isinitialized:\n return\n # clear display\n self.screen.fill(BGCOLOR)\n # draw the board\n self.drawBoard()\n # flip the display to show whatever we drew\n pygame.display.flip()", "def on_draw(self):\n self.clear()\n self.gamestatemanager.peek().on_draw(self.get_size())", "def draw(self):\n\n self.updateLazyImageLoading()\n\n image(self.baseMap, 0, 0)\n\n for layer in self.layers:\n layer.draw()\n\n for marker in self.markers:\n marker.draw()", "def updateDraw(self):\r\n self.delConns()\r\n self.delTags()\r\n self.drawConns()\r\n self.drawTags()", "def draw_arrows(self):\n for arrow in self.arrows:\n arrow.draw(self)", "def draw(self):\n with self:\n self.batch.draw()", "def draw_trees(*trees):\n TreeView(*trees).mainloop()\n return", "def redraw(self):\n\n # First remove all items from group.\n for child in self.childItems():\n self.removeFromGroup(child)\n\n # It converts the SVG vector information to QItems.\n svg = self.generateSVG()\n\n item = True\n while item:\n # Goes through each SVG item and depending on the type,\n # extracts different attributes from it and creates the\n # QItem.\n item = svg[svg.find('<')+1 : svg.find('>')]\n if item == '':\n break\n svg = svg[svg.find('>')+1:]\n\n name = item.split(' ')[0]\n\n if name == 'line':\n QItem = self.canvas.scene.addLine(\n QtCore.QLineF(float(self.getSVGItemAttrValue(item, 'x1')),\n float(self.getSVGItemAttrValue(item, 'y1')),\n float(self.getSVGItemAttrValue(item, 'x2')),\n float(self.getSVGItemAttrValue(item, 'y2')))\n )\n\n elif name == 'rect':\n pass\n\n try:\n color = self.getSVGItemAttrValue(item, 'stroke')\n except IndexError:\n color = '#000000'\n QItem.setPen(QtGui.QColor(color))\n\n # Add the QItem to ourself so it is a part of the group.\n self.addToGroup(QItem)\n self.top()", "def draw_all_objects():\n\tglobal fuel_available\n\n\tbackground_module.draw_bg(win)\n\tbackground_module.draw_snow(win)\n\tobstacles_module.draw_obstacles(win)\n\tcoins_module.draw_coins(win)\n\tforeground_module.draw_fg(win)\n\n\tfor spark_object in effects_module.Coin_spark_effects.coin_effects_list:\n\t\tspark_object.draw(win)\n\tfor hit_effect_object in effects_module.Hit_effects.hit_effects_list:\n\t\thit_effect_object.draw(win)\n\n\tif num_of_lives == 0:\n\t\tplayer_module.player.y += 
1\n\t\tplayer_module.propeller.draw(win)\n\t\tplayer_module.player.draw(win)\n\telif won_bool:\n\t\tplayer_module.draw_player(win, True)\n\telse:\n\t\tplayer_module.draw_player(win)\n\t\t\n\tbird_module.draw_bird(win)\n\tdynamic_obstacle_giftbox.draw_gift(win)\n\tdynamic_obstacle_olaf.draw_olaf(win)\n\tdynamic_obstacle_santa.draw_santa(win)\n\tdisplay_module.display_lives(win, num_of_lives)\n\tdisplay_module.draw_minimap(win,frame_count)\n\n\tif start_fuel:\n\t\tfuel_available -= 1\n\tfuel_available = display_module.fuel_bar.draw_fuel_bar(win, fuel_available, start_fuel)\n\n\tdisplay_module.draw_fuel(win)\n\tcursor.draw(win)", "def drawAllSprites(self):\n\n # disegno il labirinto\n self.walls.draw(self.scrollSurface)\n\n # disegno le monete\n self.coins.draw(self.scrollSurface)\n\n # disegno il giocatore\n self.player.draw(self.scrollSurface)\n\n # disegno i nemici\n self.enemies.draw(self.scrollSurface)\n\n # disegno le bombe\n self.bombs.draw(self.scrollSurface)\n\n # disegno le wall bombs\n self.wallBombs.draw(self.scrollSurface)\n\n # disegno i killer enemies\n self.enemyKillers.draw(self.scrollSurface)\n\n # disegno i ricaricatori del tempo\n self.timeReloaders.draw(self.scrollSurface)\n\n # disegno i greedy enemies\n self.greedyEnemies.draw(self.scrollSurface)\n\n # disegno i portali\n self.portals.draw(self.scrollSurface)\n\n # disegno i nemici che rendono invisibile il giocatore\n self.invisibilityPlayers.draw(self.scrollSurface)\n\n # disegno i proiettili del giocatore insieme allo sprite del bonus\n self.playerBullets.draw(self.scrollSurface)\n self.bonusPlayerBullets.draw(self.scrollSurface)\n\n # disegno i proiettili sparatu dai nemici\n self.shooterBullets.draw(self.scrollSurface)", "def generate_children(self):\n\n if self.children is not None:\n return\n\n print \"Generating children for %s (%s rows)\" % (self.bbox, self.count)\n\n self.children = [QuadtreeNode(self.tree, b)\n for b in self.bounds.get_children()]\n\n with utils.msgpack_open(self.source_filename) as f:\n with utils.msgpack_open(self.children[0].source_filename, \"w\") as self.children[0].file:\n with utils.msgpack_open(self.children[1].source_filename, \"w\") as self.children[1].file:\n with utils.msgpack_open(self.children[2].source_filename, \"w\") as self.children[2].file:\n with utils.msgpack_open(self.children[3].source_filename, \"w\") as self.children[3].file:\n for row in f:\n for child in self.children:\n if self.tree.latitude_col in row and self.tree.longitude_col in row and child.bbox.contains(row[self.tree.longitude_col], row[self.tree.latitude_col]):\n child.file.write(row)\n child.count += 1\n break\n for child in self.children:\n del child.file\n\n return self.children", "def draw(self):\n raise NotImplementedError", "def draw(self):\n raise NotImplementedError", "def draw(self):\n raise NotImplementedError", "def draw(self):\n ui.clear()\n ui.draw_board(self)\n ui.output_buffer()", "def draw(self):\n if self.master != None :\n fill = Cell.FILLED_COLOR_BG\n outline = Cell.FILLED_COLOR_BORDER\n\n if not self.fill:\n fill = Cell.EMPTY_COLOR_BG\n outline = Cell.EMPTY_COLOR_BORDER\n walls[self.ord][self.abs] = 0\n else:\n walls[self.ord][self.abs] = 1\n\n\n xmin = self.abs * self.size\n xmax = xmin + self.size\n ymin = self.ord * self.size\n ymax = ymin + self.size\n self.master.create_rectangle(xmin, ymin, xmax, ymax, fill = fill, outline = outline)", "def draw(self, state):\n if state is None:\n state = self.model.current_state\n for row in range(len(self.model.maze.walls)):\n 
self.__draw_row_division()\n print(\" {0:2d} \".format(row), end='') # Imprime número da linha\n\n for col in range(len(self.model.maze.walls[0])):\n if self.model.maze.walls[row][col] == 1:\n print(\"|XXX\", end='') # Desenha parede\n elif self.model.goal_state.get_element(Coordinate(row, col)):\n if state.player.row == row and state.player.col == col:\n print(\"|G-P\", end='') # Desenha objetivo e jogador.\n elif state.get_element(Coordinate(row, col)):\n print(\"|G-B\", end='') # Desenha objetivo e caixa.\n else:\n print(\"| G\", end='') # Desenha objetivo\n elif state.player.row == row and state.player.col == col:\n print(\"| P\", end='') # Desenha jogador\n elif state.get_element(Coordinate(row, col)):\n print(\"| B\", end='') # Desenha caixa.\n else:\n print(\"| \", end='') # Desenha vazio\n print(\"|\")\n if row == (len(self.model.maze.walls) - 1):\n self.__draw_row_division()", "def draw_grid(self):\n self.screen.draw_many_tiles(tile for tile in self.iter_grid_tiles())\n pass", "def draw(self):\n\n self.state_stack.peek().draw(self.screen)" ]
[ "0.8111126", "0.7670635", "0.7543276", "0.7344588", "0.72956795", "0.7167588", "0.70602024", "0.70129067", "0.70129067", "0.70129067", "0.70129067", "0.70129067", "0.70129067", "0.70129067", "0.70129067", "0.69427055", "0.6871382", "0.68627614", "0.6847094", "0.67624927", "0.67384994", "0.6692316", "0.6666602", "0.6593313", "0.6593313", "0.6593313", "0.6593313", "0.6585276", "0.65645635", "0.65523356", "0.654422", "0.65425587", "0.6541673", "0.65398324", "0.65252906", "0.6508165", "0.64999455", "0.64917254", "0.64636254", "0.6458441", "0.643834", "0.64308965", "0.6426716", "0.6422526", "0.6419156", "0.6414137", "0.6414092", "0.6401082", "0.6392167", "0.6389453", "0.6372071", "0.6366304", "0.63657993", "0.6341786", "0.6333777", "0.6310467", "0.62970585", "0.629485", "0.6294204", "0.6281727", "0.6281727", "0.6253159", "0.6251108", "0.62357765", "0.6232653", "0.62206554", "0.62195766", "0.6214857", "0.6206916", "0.61949944", "0.6188572", "0.6181883", "0.6172529", "0.6155254", "0.6154353", "0.6154033", "0.6148258", "0.61341286", "0.61320543", "0.61314374", "0.6128884", "0.6120199", "0.61191094", "0.6117976", "0.61095256", "0.61029744", "0.61014354", "0.6088752", "0.60845006", "0.6078464", "0.60683566", "0.60647875", "0.6057437", "0.6057437", "0.6057437", "0.60457855", "0.60453516", "0.60174716", "0.60152847", "0.59985775" ]
0.82077765
0
draws a single thing
def drawChild(self,x,y,z,thing):
    self.z = z

    if not thing.visable:
        return

    self.color = Vec4(*thing.color)

    realX = x+float(thing._x)
    realY = y+float(thing._y)

    if thing.style:
        style = gui.theme.define(thing.style)
        if style:
            style.draw(
                self,
                (realX,realY),
                (float(thing._width),float(thing._height)))

    if thing.clips:
        # set clip stuff
        self.pushClip(realX,realY,realX+thing._width,realY+thing._height)

    if thing.icon:
        rect = self.atlas.getRect(thing.icon)
        if rect:
            self.color = thing.color
            u,v,us,vs = rect
            self.rectStreatch((realX,realY,us,vs),(u,v,us,vs))

    if thing.text:
        # draw text stuff
        if thing.editsText:
            self.drawEditText(
                gui.theme.defineFont(thing.font),
                thing.text,
                realX,
                realY,
                thing.selection,
                thing.caret)
        else:
            self.drawText(
                gui.theme.defineFont(thing.font),
                thing.text,
                realX,
                realY)

    if thing.children:
        for child in thing.children:
            z += 1
            self.drawChild(realX,realY,z,child)

    if thing.clips:
        self.popClip()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def draw():", "def draw( self, **kw ):\n pass", "def draw(self):\n pass", "def draw(self):\n pass", "def draw(self):\n pass", "def draw(self):\n pass", "def draw(self) -> Any:", "def draw(self):", "def draw(self):\n\t\tpass", "def game_draw(self):\n pass", "def draw(self, screen):", "def draw(self):\n if context.click():\n self.place()", "def draw(self): # called to draw this piece on the board\r\n \r\n pygame.draw.circle(screen, self.color, (self.x, self.y), 23)\r\n if self.isKing:\r\n screen.blit(king_text, (self.x-12, self.y-8))", "def draw(self):\n raise NotImplementedError", "def draw(self):\n raise NotImplementedError", "def draw(self):\n raise NotImplementedError", "def Draw(self):\n print ( 10*\"*\")\n print (\"Player \" + self.character + \" says:\")\n print (\"It's a Draw\")\n print ( 10*\"*\")", "def draw(self):\n if self.is_clicked:\n pg.draw.circle(self.window, self.color, (self.x, self.y), self.r, 0)\n else:\n pg.draw.circle(self.window, self.color, (self.x, self.y), self.r, 1)", "def draw(self, force = False):\n\t\tpass", "def fooddraw(foodx, foody):\n dis.blit(Food, (foodx, foody))\n pygame.display.update()", "def on_draw(self):\n # draw everything", "def paint(self, draw, x, y, w, h):\n\t\tpass", "def draw(self):\n if self.visible:\n glColor3f(self.r, self.g, self.b)\n graphicsBall(self.x, self.y, self.radius)\n\n if self.number <= 8:\n glColor3f(1.0, 1.0, 1.0)\n else:\n glColor3f(0.0, 0.0, 0.0)\n\n graphicsBall(self.x, self.y, self.radius / 2)\n\n if self.number > 0:\n if self.number > 8:\n glColor3f(1.0, 1.0, 1.0)\n else:\n glColor3f(0.0, 0.0, 0.0)\n\n if self.number < 10:\n graphicsText(self.x - 2, self.y - 3.5, str(self.number))\n else:\n graphicsText(self.x - 4.5, self.y - 3.5, str(self.number))", "def draw(self, thing):\n thing.draw(self, Point([2,2]), flip=1)\n\n # configure the scroll region\n bbox = Canvas.bbox(self.canvas, ALL)\n self.canvas.configure(scrollregion=bbox)", "def draw(self):\n if not self.pressed:\n #draw dialogue prompt\n arcade.draw_rectangle_filled(self.center_x, self.center_y, 20, 20, arcade.color.ALABAMA_CRIMSON)\n arcade.draw_text(\"!\", self.center_x, self.center_y, arcade.color.BLACK, anchor_x=\"center\", anchor_y=\"center\")\n else:\n #draw dialogue box\n arcade.draw_rectangle_filled(self.center_x, self.center_y, self.width, self.height, self.color)\n arcade.draw_text(self.text, self.center_x, self.center_y, arcade.color.BLACK, anchor_x=\"center\", anchor_y=\"center\")", "def draw(self):\n self.batch.draw()", "def draw(self, event):\n if self.flag == 0:\n xn = int(event.x / UNIT)\n yn = int(event.y / UNIT)\n if self.chart[xn, yn]:\n self.kill((xn, yn))\n else:\n self.givebirth((xn, yn))", "def on_draw(self, da, ctx):\n self.referee.get_current_state().draw(ctx)", "def draw(self):\n if not self.pressed:\n #draw info prompt in room\n arcade.draw_rectangle_filled(self.center_x, self.center_y, 20, 20, arcade.color.ANTIQUE_BRASS)\n arcade.draw_text(\"?\", self.center_x, self.center_y, arcade.color.BLACK, anchor_x=\"center\", anchor_y=\"center\")\n else:\n #draw info to top of screen when clicked\n arcade.draw_text(self.text, 10, settings.HEIGHT - 10, arcade.color.BLACK, anchor_x=\"left\", anchor_y=\"top\")", "def on_draw(self):\n\t\tself.render()", "def on_draw(self):\n\t\tself.render()", "def draw():\n screen.fill((0, 0, 0))\n alien.draw()", "def draw_me(self):\r\n\t\tself.image.fill((100, 200, 100))\r\n\t\tif self.active: pg.draw.rect(self.image, (100, 100, 200), self.frame, 3) #if active => draw frame around selected entity width 
3\r\n\t\tself.display_surface.blit(self.image, self.rect)", "def draw(self):\n with self:\n self.batch.draw()", "def draw(self):\r\n self.__screen.draw_asteroid(self, self.__x, self.__y)", "def draw(self, draw_surface):\n if self.sub_event is not None:\n self.sub_event.draw(draw_surface)\n return\n\n # Draw info regarding how many of a given item the user will buy.\n self.how_many_dialogue.draw(draw_surface)\n draw_surface.blit(self.menu_frame, (128, 64))\n draw_surface.blit(self.cost_surf, end_at(self.cost_surf, (225, 84)))\n self.quantity_cursor.draw(draw_surface)", "def draw(self):\n\n super().draw()\n\n if self.hit or self.miss:\n # Change colour depending on hit or miss\n fl_color(FL_RED if self.hit else FL_WHITE)\n fl_pie(self.x()+4, self.y()+4, self.w() - 8, self.h() - 8, 0.0, 360.0)", "def draw_self(self):\n self.ball_sprite.draw()", "def draw(self, shape):\n shape.draw(shader=self.shader)", "def draw(self):\n #for (x, y) in self.coords:\n # pyxel.rect(\n # (x + self.x) * 4,\n # (y + self.y) * 4,\n # (x + self.x) * 4 + 3,\n # (y + self.y) * 4 + 3,\n # self.color)", "def draw(self):\n arcade.draw_xywh_rectangle_filled(\n self.x, self.y, self.width, self.height, self.fill.color\n )\n arcade.draw_xywh_rectangle_outline(\n self.x, self.y, self.width, self.height, self.pen.color, 3\n )", "def draw(self):\n self.vertex_list.draw(pyglet.gl.GL_QUADS)\n self.label.draw()", "def draw_actor(self, actor):\n actor.draw()", "def draw(self):\n return self._myCanvas.draw()", "def draw(self):\n arcade.draw_circle_filled(self.position_x, self.position_y, self.radius,self.player_color)", "def draw(self, *args, **kwargs):\n self.window.clear()\n self.batch.draw()", "def draw(self, view):\n self._wall.draw(view)\n self._paddle.draw(view)\n self._ball.draw(view)", "async def draw(self, ctx):\n if ctx.invoked_subcommand is None:\n await self.bot.send_cmd_help(ctx)", "def draw(self):\n self.screen.fill(Color.BLACK)\n self.screen.blit(self.red_block, self.apple)\n [self.screen.blit(self.green_block, xy) for xy in self.snake]\n self.screen.blit(self.white_bar, (0, 0))\n self.draw_text(str(self.score), self.score_pos, size=32)\n pygame.display.flip()", "def draw(self):\n arcade.draw_rectangle_outline(self.position_x, self.position_y, self.radius, self.color)", "def draw(screen):\n MY.restart_button.draw(screen)\n MY.display_text.draw(screen)", "def draw(self, force=False):\n self.display.draw(force)", "def draw(self):\n if self.state == 'alive':\n for i in range(len(self.tail)):\n pygame.draw.rect(display, black, (squareToXPix(self.tail[-(i + 1)][0], objectSize), squareToYPix(self.tail[-(i + 1)][1], objectSize), objectSize, objectSize))\n\n pygame.draw.rect(display, black, (squareToXPix(self.x, objectSize), squareToYPix(self.y, objectSize), objectSize, objectSize))\n\n else:\n for i in range(len(self.tail)):\n pygame.draw.rect(display, red, (squareToXPix(self.tail[-(i + 1)][0], objectSize), squareToYPix(self.tail[-(i + 1)][1], objectSize), objectSize, objectSize))\n\n pygame.draw.rect(display, red, (squareToXPix(self.x, objectSize), squareToYPix(self.y, objectSize), objectSize, objectSize))", "def add_draw(self, draw):\n self.draws.append(draw)", "def __draw(self, display, color, size):\n\t\tif self.walls[0]: # up\n\t\t\tpygame.draw.line(display, color, (self.col * size , self.row * size) , (self.col * size + size, self.row * size))\n\t\tif self.walls[3]: # down\n\t\t\tpygame.draw.line(display, color, (self.col * size + size, self.row * size + size), (self.col * size , self.row * size + size))\n\t\tif 
self.walls[1]: #left\n\t\t\tpygame.draw.line(display, color, (self.col * size + size, self.row * size) , (self.col * size + size, self.row * size + size))\n\t\tif self.walls[2]: #right\n\t\t\tpygame.draw.line(display, color, (self.col * size , self.row * size + size), (self.col * size , self.row * size))\n\n\t\tif self.current:\n\t\t\tdraw_rect_with_alpha(display, self.CURRENT_COLOR, Vector((self.col, self.row)) * size, (size, size))\n\n\t\telif self.backtracked and self.SHOW_BACKTRACK:\n\t\t\tdraw_rect_with_alpha(display, self.BACKTRACKED_COLOR, Vector((self.col, self.row)) * size, (size, size))\n\n\t\telif self.visited:\n\t\t\tdraw_rect_with_alpha(display, self.VISITED_COLOR, Vector((self.col, self.row)) * size, (size, size))", "def draw(self):\n for obj in self.objects:\n obj.draw()", "def draw(self):\n if self.type == 'Circle':\n pygame.draw.circle(self.screen, SIM_COLORS['blue'], \n (int(self._params[0]), int(self._params[1])), \n int(self._params[2]), \n 0)\n elif self.type == 'Line':\n pygame.draw.line(self.screen, SIM_COLORS['blue'],\n (int(self._params[0]), int(self._params[1])),\n (int(self._params[2]), int(self._params[3])), 5)\n elif self.type == 'Rect':\n pygame.draw.rect(self.screen, SIM_COLORS['blue'],\n (int(self._params[0]), int(self._params[1]),\n int(self._params[2]), int(self._params[3])) )", "def pre_draw(self):", "def draw(self):\r\n try:\r\n self.sprite.draw()\r\n except:\r\n return -1", "def draw_circle(self, draw_x, draw_y, player_one):\n if player_one:\n pygame.draw.circle(self.background, (0, 0, 0), (draw_x, draw_y), self.radius + 1)\n pygame.draw.circle(self.background, (self.red, 0, self.blue), (draw_x, draw_y), self.radius)\n pygame.draw.circle(self.background, (self.red, 100, self.blue + 100), (draw_x, draw_y), self.radius - 8)\n else:\n pygame.draw.circle(self.background, (0, 0, 0), (draw_x, draw_y), self.radius + 1)\n pygame.draw.circle(self.background, (self.red, 0, self.blue), (draw_x, draw_y), self.radius)\n pygame.draw.circle(self.background, (self.red + 100, 100, self.blue), (draw_x, draw_y), self.radius - 8)", "def draw_self(self, x, y):\n noStroke()\n fill(1.0, 0.5, 0.6)\n ellipse(x, y, 100, 100)\n bottom_half = createShape()\n bottom_half.beginShape()\n bottom_half.vertex(x, y)\n bottom_half.vertex(x+100, y)\n bottom_half.vertex(x+100, y+50)\n bottom_half.vertex(x+50, y+25)\n bottom_half.vertex(x, y+50)\n bottom_half.endShape()\n shape(bottom_half, -50, 0)\n\n self.eyes.display(x, y - 15, self.looking)", "def draw(self):\n self.screen.fill((0,51,102))\n # get the new drawables\n self.drawables = (self.game_model.get_background_drawables()\n + self.game_model.get_plane_drawables()\n + self.game_model.get_bullet_drawables()\n + self.game_model.get_enemy_drawables())\n for d in self.drawables:\n rect = d.get_rect()\n surf = d.get_surface()\n surf.set_colorkey((255,255,255))\n self.screen.blit(surf, rect)", "def draw_ball():\n\n draw_circle(ball, 'yellow')", "def draw(self):\n if self.node:\n if self.async:\n if self.cancel_draw:\n self.after_cancel(self.cancel_draw)\n self.cancel_draw = self.after(3, self._draw)\n else: self._draw()", "def draw(self):\n # IMPLEMENT ME\n \"\"\"\n GRectangle(x=GAME_WIDTH/2,y=GAME_HEIGHT/2,\n width=GAME_WIDTH,height=GAME_HEIGHT,\n fillcolor=introcs.RGB(0,0,0)).draw(self.view)\n if self.getState() == STATE_INACTIVE:\n self.getText().draw(self.view)\n if self.getState() == STATE_PAUSED:\n self.getText().draw(self.view)\n if not self.getWave() is None:\n self.getWave().draw(self.view)\n if self.getState() == 
STATE_COMPLETE:\n self.getText().draw(self.view)\n if self.getState() == STATE_PAUSED or self.getState() == STATE_ACTIVE or self.getState() == STATE_COMPLETE:\n self.getText().draw(self.view)\n\n GRectangle(x=GAME_WIDTH/2,y=GAME_HEIGHT/2,\n width=GAME_WIDTH,height=GAME_HEIGHT,\n fillcolor=introcs.RGB(0,0,0)).draw(self.view)\"\"\"\n if not self.getText() is None:\n self.getText().draw(self.view)\n if not self.getWave() is None:\n self.getWave().draw(self.view)", "def on_draw(self):\r\n\r\n \r\n # clear the screen to begin drawing\r\n arcade.start_render()\r\n\r\n background = arcade.load_texture(\"gala.png\")\r\n arcade.draw_texture_rectangle(SCREEN_WIDTH/2, SCREEN_HEIGHT/2,SCREEN_WIDTH , SCREEN_HEIGHT, background) \r\n \r\n\r\n for asteriod in self.rocks:\r\n asteriod.draw()\r\n \r\n # for asteriod in self.rockss:\r\n # asteriod.draw()\r\n\r\n # for asteriod in self.rocksss:\r\n # asteriod.draw() \r\n \r\n for bullet in self.bullets:\r\n bullet.draw()\r\n \r\n \r\n self.ship.draw()\r\n \r\n \r\n # TODO: draw each object\r", "def on_draw(self):\n self.clear()\n arcade.draw_text(\n \"Game Over - Click to restart\",\n SCREEN_WIDTH / 2,\n SCREEN_HEIGHT / 2,\n arcade.color.WHITE,\n 30,\n anchor_x=\"center\",\n )", "def on_draw(self):\n arcade.start_render()\n self.snake.draw()\n self.apple_list.draw()\n if self.state not in (2,3): #if not paused or ended, it shows score and lives\n stats_overlay(self.width, self.height-20, self.score, self.lives)\n if self.state == 2:\n pause_overlay(self.width//2, 7*self.height//12) #shows that it is paused\n if self.state == 3: #shows game over\n game_over_overlay(self.width//2, 7*self.height//12, self.score)", "def on_draw(self):\n\n # clear the screen to begin drawing\n arcade.start_render()\n\n # draw each object\n self.ball.draw()\n self.paddle.draw()\n\n self.draw_score()", "def draw_instruction():\r\n arcade.draw_text(\r\n \"This is a game of Santa, Reindeer, Snowman\", 0, 50, arcade.color.WHITE, 15\r\n )\r\n arcade.draw_text(\r\n \"Santa beats snowman, snowman beats reindeer, reindeer beats santa\",\r\n 0,\r\n 30,\r\n arcade.color.WHITE,\r\n 13,\r\n )\r\n arcade.draw_text(\r\n \"Press button 1 for santa, 2 for reindeer, and 3 for snowman\",\r\n 0,\r\n 10,\r\n arcade.color.WHITE,\r\n 15,\r\n )\r\n arcade.draw_text(\r\n \"User Choice\", WINDOW_WIDTH - 175, WINDOW_HEIGHT - 60, arcade.color.WHITE, 15\r\n )\r\n arcade.draw_text(\"CPU Choice\", 75, WINDOW_HEIGHT - 60, arcade.color.WHITE, 15)", "def draw(self, surface, offset=(0,0)):\n mouse = pg.mouse.get_pos()\n pos = mouse[0]-offset[0], mouse[1]-offset[1]\n if self.clicked:\n fill_color = pg.Color(\"white\")\n text = self.selected_text\n elif self.rect.collidepoint(pos):\n fill_color = (198, 226, 255)\n text = self.selected_text\n else:\n fill_color = self.color\n text = self.text\n surface.fill(pg.Color(\"black\"), self.rect)\n surface.fill(fill_color, self.rect.inflate(-2,-2))\n surface.blit(text, self.text_rect)", "def draw(self, win):\n self.rect.draw(win)\n self.text.draw(win)", "def draw(self, draw_surface):\n\n # The menu frame and how many surf (frame that appears in the middle\n # of the bottom of the screen).\n draw_surface.blit(self.menu_frame, (176, 112))\n draw_surface.blit(self.how_many_surf, (40, 115))\n\n if self.confirm_toss_response_dialogue is None and \\\n self.threw_away_dialogue is None:\n self.quantity_cursor.draw(draw_surface)\n\n # If on the trow away dialogue we don't need to draw anything else (it\n # is taken care of in the how many surf). 
Return so that cursor and\n # yes no surf are not drawn.\n if self.threw_away_dialogue is not None:\n return\n\n elif self.confirm_toss_response_dialogue is not None:\n draw_surface.blit(self.yes_no_surf, (195, 127))\n self.cursor.draw(draw_surface)", "def _drawObject(self, object):\n if object.draw:\n Game.Screen.blit(self._getCurrentObjectFrame(object), (object.position.x, Game.ScreenHeight - (object.position.y + object.objectType.height)))", "def draw_piece(self):\n self.screen.blit(self.image, self.rect)", "def draw(self, draw_surface):\n draw_surface.blit(self.menu_frame, (140, 71))\n draw_surface.blit(self.cost_surf, end_at(self.cost_surf, (202, 87)))\n self.quantity_cursor.draw(draw_surface)", "def draw_item(self):\r\n self.screen.blit(self.spawned_item, self.rect)", "def draw(self, shape):\r\n if not self.s_flg:\r\n opengles.glEnable(GL_SCISSOR_TEST)\r\n opengles.glScissor(ctypes.c_int(int(0)), ctypes.c_int(self.y0),\r\n ctypes.c_int(self.ix), ctypes.c_int(1))\r\n self.s_flg = True\r\n shape.draw(shader=self.shader)", "def draw(self, x, y):\r\n for w in self.widgets:\r\n if w.visible:\r\n w.draw()\r\n self.pointer.position(x + self.p_dx, y + self.p_dy, 0.5)\r\n self.pointer.draw()", "def draw():\n clear()\n\n for target in targets:\n goto(target.x, target.y)\n dot(20, \"blue\")\n\n if inside(ball):\n goto(ball.x, ball.y)\n dot(6, \"red\")\n\n update()", "def setup_draw(self):\n pass", "def draw(self, *args, **kwds):\n Visual.draw(self, *args, **kwds)", "def draw(self):\n return self._draw", "def on_draw():\n window.clear()\n world.draw()", "def draw(self):\r\n arcade.draw_circle_filled(self.center.x, self.center.y, BULLET_RADIUS, BULLET_COLOR)", "def drawCells(self):\r\n self.drawing = not self.drawing\r\n if self.drawing:\r\n self.draw_button['text'] = \"No Draw\"\r\n else:\r\n self.draw_button['text'] = \"Draw\"", "def draw(self, view):\n for i in self.get_aliens():\n for n in i:\n if not n is None:\n n.draw(view)\n self.get_ship().draw(view)\n self.get_ship().get_linevalue().draw(view)\n for i in self.get_PU():\n i.draw(view)\n for i in self.get_bolts():\n i.draw(view)", "def draw(self, draw_surface):\n if not self.can_afford:\n self.cant_afford_dialogue.draw(draw_surface)\n else:\n draw_surface.blit(self.in_bag_frame, (2, 81))\n self.how_many_dialogue.draw(draw_surface)\n self.how_many_selector.draw(draw_surface)", "def draw(self):\n self.write_image()\n self.update()", "def draw(self, draw_circle):\n draw_circle(self.color, (int(self.position[0]), int(self.position[1])), self.size)", "def draw(self, view):\n for r in self._aliens:\n for alien in r:\n if alien != None:\n alien.draw(view)\n if self._ship != None:\n self._ship.draw(view)\n self._dline.draw(view)\n for bolt in self._bolts:\n bolt.draw(view)", "def on_draw(self):\n # Clearing the buffers\n self.clear()\n self.set3d()\n # Makes it so color can be added\n glColor3d(1, 1, 1)\n\n self.push(self.player.pos, self.player.rot)\n self.model.draw()\n glPopMatrix()\n self.model.process_queue_slowly()\n\n # Draws the crosshairs on the screen\n self.set2d()\n self.draw_position_label()\n self.draw_reticle()", "def draw(self,renderer,dx,dy):\n for i in self.itemType.find('display'):\n if i.tag == 'rect':\n colors = i.find('color').text[1:-1].split(',')\n SDL_SetRenderDrawColor(renderer,int(colors[0]),int(colors[1]),int(colors[2]),int(colors[3]) if len(colors) > 3 else 255)\n rect = SDL_Rect()\n rect.x, rect.y = self.getPos()\n rect.x, rect.y = rect.x+dx,rect.y+dy\n rect.w, rect.h = self.getSize()\n 
SDL_RenderFillRect(renderer,rect)", "def draw(self):\n self.ball_sprite.draw()", "def draw(self, draw_surface):\n self.give_sub_event.draw(draw_surface)", "def draw(self, state):\n if state is None:\n state = self.model.current_state\n for row in range(len(self.model.maze.walls)):\n self.__draw_row_division()\n print(\" {0:2d} \".format(row), end='') # Imprime número da linha\n\n for col in range(len(self.model.maze.walls[0])):\n if self.model.maze.walls[row][col] == 1:\n print(\"|XXX\", end='') # Desenha parede\n elif self.model.goal_state.get_element(Coordinate(row, col)):\n if state.player.row == row and state.player.col == col:\n print(\"|G-P\", end='') # Desenha objetivo e jogador.\n elif state.get_element(Coordinate(row, col)):\n print(\"|G-B\", end='') # Desenha objetivo e caixa.\n else:\n print(\"| G\", end='') # Desenha objetivo\n elif state.player.row == row and state.player.col == col:\n print(\"| P\", end='') # Desenha jogador\n elif state.get_element(Coordinate(row, col)):\n print(\"| B\", end='') # Desenha caixa.\n else:\n print(\"| \", end='') # Desenha vazio\n print(\"|\")\n if row == (len(self.model.maze.walls) - 1):\n self.__draw_row_division()", "def draw(self):\n ui.clear()\n ui.draw_board(self)\n ui.output_buffer()", "def draw(self, draw_line):\n #draw_rect(self.color, (self.position, (self.size, self.size)))\n line_start = (int(self.position[0]), int(self.position[1] - self.size/2))\n line_end = (int(line_start[0] + self.size), line_start[1])\n draw_line(self.color, line_start, line_end, self.size)\n\n gun_start = (int(self.position[0] + self.size/2), line_start[1])\n gun_end = (int(gun_start[0] + math.cos(self.angle) * self.barrel), int(gun_start[1] - math.sin(self.angle) * self.barrel))\n draw_line(self.color, gun_start, gun_end, 5)", "def on_draw(self):\n arcade.start_render()\n arcade.draw_lrwh_rectangle_textured(0, 0,\n constants.SCREEN_WIDTH * 1, constants.SCREEN_HEIGHT * 1,\n self.background, alpha=50)", "def draw(cls, renderer: sdl2.render.SDL_Renderer) -> None:\n raise NotImplementedError" ]
[ "0.77985585", "0.74365014", "0.7152805", "0.7152805", "0.7152805", "0.7152805", "0.7134484", "0.7120514", "0.70584196", "0.6996481", "0.6867047", "0.68482596", "0.6772941", "0.67679566", "0.67679566", "0.67679566", "0.67469466", "0.6740048", "0.67351854", "0.66578966", "0.66570926", "0.6621138", "0.65413314", "0.65283775", "0.6522965", "0.6522075", "0.6520664", "0.65203404", "0.651067", "0.64691144", "0.64691144", "0.64451396", "0.64173603", "0.639245", "0.63817656", "0.6378497", "0.63586366", "0.63519514", "0.63446784", "0.6311736", "0.63097024", "0.62947625", "0.6286205", "0.62674195", "0.6265972", "0.6264812", "0.6264511", "0.62627304", "0.6258327", "0.62534827", "0.62527424", "0.6241082", "0.6233011", "0.622965", "0.6227858", "0.622643", "0.62216365", "0.6218841", "0.62109447", "0.62094545", "0.6189148", "0.6184267", "0.6184228", "0.6172461", "0.61665773", "0.61639655", "0.6163145", "0.61631364", "0.61608547", "0.61556154", "0.61516", "0.61408633", "0.6137453", "0.6136984", "0.61357033", "0.6128665", "0.6117781", "0.6116456", "0.6114472", "0.6110863", "0.6104715", "0.60968584", "0.60887754", "0.60833955", "0.60794723", "0.60793024", "0.60763025", "0.6073308", "0.60709846", "0.6064682", "0.6063779", "0.6062538", "0.6061648", "0.6053698", "0.6050937", "0.60482", "0.6043973", "0.6042363", "0.6035647", "0.6034814" ]
0.62185705
58
draws the text and selection and caret
def drawEditText(self, font, text, x, y, selection=(0,0), caret=-1):
    # draws editable text, inserting a caret glyph ('|') at the caret index
    self.color = Vec4(*font.color)
    name = font.name
    char_count = 0
    ox = x
    baseLetter = self.atlas.getChar(name + str(ord("T")))
    omaxh = baseLetter[3] - baseLetter[4][1]
    for line in text.split("\n"):
        build = []
        maxh = omaxh
        for c in line:
            if char_count == caret:
                # caret sits just before this character
                u,v,w,h,e = self.atlas.getChar(name + str(ord('|')))
                build.append((x-w/2,y+e[1],u,v,w,h))
            char_count += 1
            code = ord(c)
            if code <= 32:
                # whitespace/control character: advance by the width
                # of glyph 77 ('M') without emitting a quad
                u,v,w,h,e = self.atlas.getChar(name + str(77))
                x += e[0]
                continue
            u,v,w,h,e = self.atlas.getChar(name + str(code))
            build.append((x,y+e[1],u,v,w,h))
            x += e[0]
            maxh = max(maxh,h-e[1])
        else:
            # for-else: the line is exhausted; the caret may sit at its end
            # (char_count is bumped once more for the consumed newline)
            if char_count == caret:
                u,v,w,h,e = self.atlas.getChar(name + str(ord('|')))
                build.append((x-w/2,y+e[1],u,v,w,h))
            char_count += 1
        # flush the line's quads, baseline-aligned to the tallest glyph
        for x,y,u,v,w,h in build:
            self.rectStreatch((x,y+maxh-h,w,h),(u,v,w,h))
        x = ox
        y += maxh
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def draw(self):\n if self.dirty:\n self._render()\n for text in self.text_lines:\n text.draw()", "def draw(self, win):\n self.rect.draw(win)\n self.text.draw(win)", "def text_draw(self, x, y, text, style={}):", "def draw(self, screen):\n lines = self.text.strip().split('\\n')\n y = self.y\n for line in lines:\n self.ui.show_text(line, (self.x, y), 30)\n y += 32", "def draw(self):\n base_x = self.term.width // 2\n base_y = (self.term.height - len(self.OPTIONS)) // 2\n print(end=self.term.home + self.term.clear)\n print(\n self.term.move_xy(base_x - 2, base_y - 2)\n + self.term.green_bold\n + \"SNEK\"\n + self.term.normal\n )\n for index, (label, _action) in enumerate(self.OPTIONS):\n x = base_x - len(label) // 2\n y = base_y + index\n if index == self.selection_index:\n style = self.term.bold_red_reverse\n else:\n style = self.term.red\n print(self.term.move_xy(x, y) + style + label + self.term.normal)", "def draw():", "def draw(self):\n self.screen.fill(WHITE)\n self.color_invalid()\n self.draw_selected()\n self.shade_locked_cells()\n self.draw_grid()\n self.draw_buttons()\n self.draw_numbers()", "def draw(self, surface, offset=(0,0)):\n mouse = pg.mouse.get_pos()\n pos = mouse[0]-offset[0], mouse[1]-offset[1]\n if self.clicked:\n fill_color = pg.Color(\"white\")\n text = self.selected_text\n elif self.rect.collidepoint(pos):\n fill_color = (198, 226, 255)\n text = self.selected_text\n else:\n fill_color = self.color\n text = self.text\n surface.fill(pg.Color(\"black\"), self.rect)\n surface.fill(fill_color, self.rect.inflate(-2,-2))\n surface.blit(text, self.text_rect)", "def draw(self):", "def draw(text: list):\n\n curses.wrapper(wrapper, text)", "def draw_text(self, text, i, j, **params):", "def draw(self):\n pass", "def draw(self):\n pass", "def draw(self):\n pass", "def draw(self):\n pass", "def draw(self):\r\n pygame.draw.rect(self.screen, self.background_color, self.bounds)\r\n line_window = self.lines[self.scroll_window_top:self.scroll_window_bottom]\r\n for idx,line in enumerate(line_window):\r\n text = self.font.render(line, True, self.foreground_color)\r\n x,y = self._get_x_y_from_pos(self.position[0], self.position[1]+idx)\r\n self.screen.blit(text,(x,y))\r\n \r\n if self.cursor_visible and self.scroll_window_bottom == len(self.lines):\r\n x,y = self._get_x_y_from_pos(len(line_window[-1]), len(line_window))\r\n cursor_rect = pygame.Rect(x,y,\r\n self.text_width,self.text_height)\r\n pygame.draw.rect(self.screen, self.foreground_color, cursor_rect)", "def flush(self, header, caret, select_start_pos, select_end_pos, scr_topleft, scr_bottomright):\n self.update_screen_size()\n self.stdscr.erase()\n # header\n for text, color in header:\n self.stdscr.addstr(text, color_pair(color))\n text_selected = select_start_pos is not None\n # display lines\n displayed_lines = self.lines[scr_topleft.y : min(len(self.lines), scr_bottomright.y)]\n for index, line in enumerate(displayed_lines):\n self.stdscr.addstr(PADCHAR)\n if len(line) >= scr_topleft.x:\n # inclusive, position of line start and line end of displayed line\n ln_start = Position(scr_topleft.y + index, scr_topleft.x)\n ln_end = Position(scr_topleft.y + index, scr_topleft.x + self.screen_width())\n displayed_line = line[ln_start.x : min(len(line), scr_bottomright.x - 1)]\n if text_selected:\n # whether start position and end position of line are between selection\n start_between = ln_start.is_between(select_start_pos, select_end_pos)\n end_between = ln_end.is_between(select_start_pos, select_end_pos)\n # whether selection is 
between start and end position\n select_start_between = select_start_pos.is_between(ln_start, ln_end)\n select_end_between = select_end_pos.is_between(ln_start, ln_end)\n if start_between and end_between:\n # completely enclosed\n self.stdscr.addstr(displayed_line, color_pair(7))\n elif start_between:\n # only start between selection\n # end is on same line\n # only starting portion is highlighted\n self.stdscr.addstr(displayed_line[ : select_end_pos.x - ln_start.x + 1], color_pair(7))\n self.stdscr.addstr(displayed_line[select_end_pos.x - ln_start.x + 1 : ])\n elif end_between:\n # only end between selection\n # start is on same\n # only ending portion is highlighted\n self.stdscr.addstr(displayed_line[ : select_start_pos.x - ln_start.x])\n self.stdscr.addstr(displayed_line[select_start_pos.x - ln_start.x : ], color_pair(7))\n elif select_start_between and select_end_between:\n # selection is all on this line\n # start and end not highlighted\n self.stdscr.addstr(displayed_line[ : select_start_pos.x - ln_start.x])\n self.stdscr.addstr(\n displayed_line[select_start_pos.x - ln_start.x : select_end_pos.x - ln_start.x + 1],\n color_pair(7)\n )\n self.stdscr.addstr(displayed_line[select_end_pos.x + 1 - ln_start.x : ])\n else:\n # not enclosed by selection at all\n self.stdscr.addstr(displayed_line)\n else:\n self.stdscr.addstr(displayed_line)\n if index != len(displayed_lines) - 1:\n self.stdscr.addstr('\\n')\n self.stdscr.move(caret.y - scr_topleft.y + HEADER_LEN, caret.x - scr_topleft.x + PAD_LEN)", "def draw(self):\n\t\tpass", "def draw(self):\n ui.clear()\n ui.draw_board(self)\n ui.output_buffer()", "def draw(self, width, height):\n \n line_spacing = 20\n \n\n #TODO:Smart algorithm to map mouse position to the scrolling speed\n #zooming level should go here\n \n if self.scroll > 20:\n self.factor = self.scroll * 0.1\n\n elif self.scroll < -20:\n self.factor = abs(self.scroll) * 0.1\n \n elif abs(self.scroll) > 50:\n self.factor = 5\n self.scroll = 50\n\n else:\n self.factor = 0\n \n output_text = \"\"\n\n if self.text:\n l = self.min_text\n l1 = l\n l2 = l + 1\n \n tab_previous = self.tab_index[l]\n \n while l < self.max_text:\n \n #Find all the lines with the same indentation level\n while l < self.line_count - 2 and self.tab_index[l + 1] == tab_previous:\n l2 += 1 \n l += 1\n \n self.tab_cairo += tab_previous * 20\n font_size = int(self.zoom - (tab_previous * self.factor))*pango.SCALE\n \n #Set a minimum font size\n if font_size < 8000:\n font_size = 8000\n \n pango.FontDescription.set_size(self.desc, font_size)\n self.pg.set_font_description(self.desc)\n \n #Adjust line spacing as font size decreases\n line_spacing -= tab_previous * 0.5 \n\n self.cr.move_to(self.tab_cairo, self.max_cairo)\n \n output_text = '\\n'.join(self.text[l1:l2])\n \n self.pg.set_text(output_text)\n self.cr.show_layout(self.pg)\n\n self.max_cairo += line_spacing * (l2 - l1) \n \n #Reset all values\n self.tab_cairo = 20\n line_spacing = 20\n l += 1\n \n try:\n tab_previous = self.tab_index[l]\n \n except IndexError:\n tab_previous = self.tab_index[-1]\n \n l1 = l\n l2 = l + 1", "def paint(self):\r\n self.win.bkgd(\" \", COLOR_PAIR[\"con_text\"])", "def draw(self):\n if not self.pressed:\n #draw dialogue prompt\n arcade.draw_rectangle_filled(self.center_x, self.center_y, 20, 20, arcade.color.ALABAMA_CRIMSON)\n arcade.draw_text(\"!\", self.center_x, self.center_y, arcade.color.BLACK, anchor_x=\"center\", anchor_y=\"center\")\n else:\n #draw dialogue box\n arcade.draw_rectangle_filled(self.center_x, self.center_y, 
self.width, self.height, self.color)\n arcade.draw_text(self.text, self.center_x, self.center_y, arcade.color.BLACK, anchor_x=\"center\", anchor_y=\"center\")", "def draw( self, **kw ):\n pass", "def on_draw(self):\n # draw everything", "def draw(self):\n if not self.pressed:\n #draw info prompt in room\n arcade.draw_rectangle_filled(self.center_x, self.center_y, 20, 20, arcade.color.ANTIQUE_BRASS)\n arcade.draw_text(\"?\", self.center_x, self.center_y, arcade.color.BLACK, anchor_x=\"center\", anchor_y=\"center\")\n else:\n #draw info to top of screen when clicked\n arcade.draw_text(self.text, 10, settings.HEIGHT - 10, arcade.color.BLACK, anchor_x=\"left\", anchor_y=\"top\")", "def draw(self):\n if context.click():\n self.place()", "def draw(self, offset=geo.zero2d()):\n self._draw_bar(self._get_active_color(), self.inactive_color, offset)\n console.set_default_color_fg(self.text_color)\n self._draw_numbers(offset)", "def draw(self):\n raise NotImplementedError", "def draw(self):\n raise NotImplementedError", "def draw(self):\n raise NotImplementedError", "def updateDraw(self):\r\n self.delConns()\r\n self.delTags()\r\n self.drawConns()\r\n self.drawTags()", "def _render(self):\n self.dirty = False\n self.text_lines = [TextLine(self.font, self.font_size, line) for line in self._text_paragraph]\n self.text_lines[0].rect.top = self.offset.top\n\n # offset the height of each line\n prev = Rect(0, 0, 0, 0)\n for t in self.text_lines:\n t.rect.top += prev.bottom\n t.rect.left = self.offset.left\n prev = t.rect", "def OnDrawGTKText(self, dc):\r\n\r\n self._pButton.OnDrawGTKText(dc)", "def _render(self):\n self.dirty = False\n self.image = self.font.render(self._text, self.aa, self.color_fg)\n self.rect = self.image.get_rect()", "def draw(self, grille):\n grille.clear_highlight()\n for x in range(8):\n for y in range(8):\n self.draw_c(x, y, grille)", "def draw(self):\n self.strip.show()", "def draw(self) -> None:\n offset = 7 + SELECTOR_HEIGHT\n x, y = 4, 5\n screen.fill(BLACK)\n for i in range(self.displayed_jobs_min, self.displayed_jobs_max + 1):\n\n # If this job is selected, hightlight it\n if i == self.sel_idx:\n self.__highlight_selection(x, y)\n\n # Draw the job + offset y each time\n self.draw_selection_item(self.jobs_list[i], x, y)\n y += offset", "def _show_selection(self, text, bbox):\r\n x, y, width, height = bbox\r\n\r\n textw = self._font.measure(text)\r\n\r\n canvas = self._canvas\r\n canvas.configure(width=width, height=height)\r\n canvas.coords(canvas.text, width - textw, height / 2 - 1)\r\n canvas.itemconfigure(canvas.text, text=text)\r\n canvas.place(in_=self._calendar, x=x, y=y)", "def __refreshContent(self):\n self._window.clear()\n self.drawBorder()\n for i in range(self.__firstShownLine,\n self.__firstShownLine + self.height - 2):\n if self._focused and i == self.__selectedRow:\n self._window.attron(curses.A_BOLD)\n self.__printRow(i)\n self._window.attroff(curses.A_BOLD)", "def draw(screen):\n MY.restart_button.draw(screen)\n MY.display_text.draw(screen)", "def _show_selection(self, text, bbox):\n x, y, width, height = bbox\n\n textw = self._font.measure(text)\n\n canvas = self._canvas\n canvas.configure(width=width, height=height)\n canvas.coords(canvas.text, width - textw, height / 2 - 1)\n canvas.itemconfigure(canvas.text, text=text)\n canvas.place(in_=self._calendar, x=x, y=y)", "def draw(self):\n if not self.exists:\n return\n if self.attributes[AT.TARGET_TYPE] == TargetType.TIMED:\n self.draw_frame_timed(self.text.opacity / 2 + 0.5)\n else:\n self.draw_frame(0.5)\n 
self.text.draw()", "def draw(self):\n self.figure.canvas.draw_idle()", "def drawCells(self):\r\n self.drawing = not self.drawing\r\n if self.drawing:\r\n self.draw_button['text'] = \"No Draw\"\r\n else:\r\n self.draw_button['text'] = \"Draw\"", "def __update_selection(self):\n if self.selected_offset != self.old_selected_offset:\n if self.old_selected_offset > -1:\n old_offset = (self.old_selected_offset - self.top_offset) * 8\n\n self.display.text(\">\", 0, old_offset, 0)\n\n new_offset = (self.selected_offset - self.top_offset) * 8\n self.display.text(\">\", 0, new_offset, 1)\n self.display.show()\n self.old_selected_offset = self.selected_offset", "def draw(self):\n\n for item in self.vis:\n item.undraw()\n self.render()\n for item in self.vis:\n item.draw(self.win)\n self.drawn = True", "def paint(self):\n if self.config['colorize']:\n self.highlight()\n else:\n self.clear_highlight()", "def draw(c):\n c.draw_text(format(current), (75, 190), 60, \"yellow\")\n c.draw_text(\"Score\", (200, 40), 28, \"white\", \"sans-serif\")\n c.draw_text(score(wins, tries), (208, 70), 30, \"white\", \"sans-serif\")", "def draw(self, screen):", "def pre_draw(self):", "def draw(self, context):\n rect = self.get_allocation()\n #initial context settings: line width & font\n context.set_line_width(1)\n font = gtk.Label().style.font_desc.get_family()\n context.select_font_face(font,cairo.FONT_SLANT_NORMAL, \\\n cairo.FONT_WEIGHT_NORMAL)\n \n self.draw_basics(context, rect)\n if self.data:\n self._do_draw(context, rect)", "def setup_draw(self):\n pass", "def on_draw(self):\n\t\tself.render()", "def on_draw(self):\n\t\tself.render()", "def draw(self):\n arcade.draw_rectangle_outline(self.position_x, self.position_y, self.radius, self.color)", "def draw(self):\n\n self.text_transfer.config(validate=\"focusout\", validatecommand=self.reset_counter)\n self.text_transfer.delete(0, tk.END)\n tk.Label(self.text_frame, text=\"Letter to send: \").grid(row=1)\n self.text_transfer.grid(row=1, column=1)\n send_text_button = tk.Button(self.text_frame, text=\"Send\", width=10, command=self.send_text())\n send_text_button.grid(row=1, column=4, pady=20, padx=20)\n tk.Button(self.text_frame, text=\"<\", width=2, command=self.update_counter(-1)).grid(row=1, column=2)\n tk.Button(self.text_frame, text=\">\", width=2, command=self.update_counter(1)).grid(row=1, column=3)\n\n available_serials = get_available_serials()\n\n for i in range(len(available_serials)):\n self.ports_listbox.insert(i + 1, available_serials[i])\n\n self.connect_msg = tk.Label(self.connect_frame, text=\"Port: \").grid(row=1)\n self.ports_listbox.grid(row=1, column=1)\n self.connect_button = tk.Button(self.connect_frame, text=\"Connect\", width=10, command=self.connect())\n self.connect_button.grid(row=1, column=2, pady=20, padx=20)\n tk.Label(self.connect_frame, text=\"Connection Parameters\", font=(None, 16)).grid(row=0, padx=15, pady=15)\n tk.Label(self.text_frame, text=\"Text Transfer\", font=(None, 16)).grid(row=0, padx=15, pady=15)\n tk.Label(self.electric_frame, text=\"Electric Parameters\", font=(None, 16)).grid(row=0, padx=15, pady=15)\n self.freq_edit.delete(0, tk.END)\n self.freq_edit.delete(0, tk.END)\n tk.Label(self.electric_frame, text=\"Frequency: \").grid(row=1)\n self.freq_edit.grid(row=1, column=1)\n tk.Label(self.electric_frame, text=\"Duty ratio: \").grid(row=2)\n self.duty_edit.grid(row=2, column=1)\n change_freq_button = tk.Button(self.electric_frame, text=\"Set\", width=10, command=self.set_frequency(), padx=10)\n 
change_freq_button.grid(column=2)\n file = open(r'params.pkl', 'rb')\n freq = pickle.load(file)\n duty = pickle.load(file)\n print (\"freq is \", freq)\n print (\"duty is \", duty)\n self.freq_edit.insert(0, freq)\n self.duty_edit.insert(0, duty)\n file.close()", "def draw(self):\n self.menu_pointer.draw()", "def _draw_line_text(self):\n self._line_text.set_text(self.model.get_current_line())", "def draw(self, renderer):\n renderer.drawRect(pyui.colors.black, self.windowRect)\n renderer.drawText( \"Strokes: %d\" % len(self.strokes), (650,50), pyui.colors.white)\n for start, end, color in self.strokes:\n renderer.drawLine(start[0], start[1], end[0], end[1], color)", "def display_text(self, text, size=None, colr=None,\r\n x = None, y = None,\r\n new_line = None):\r\n if size is None:\r\n size = self.dt_size\r\n self.size = size\r\n if colr is None:\r\n colr = self.text_color\r\n self.text_color = colr\r\n if new_line is not None:\r\n if x is not None or y is not None:\r\n raise Exeception(\"Must not have new_line and x,y\")\r\n else:\r\n if x is not None or y is not None:\r\n new_line = False\r\n else:\r\n new_line = True\r\n if new_line:\r\n x = self.dt_x = self.disp_left\r\n self.dt_y -= size*self.font_size_to_ch\r\n y = self.dt_y\r\n #print(f\"new_line: y:{y} dt_y:{self.dt_y}\")\r\n else:\r\n if x is None:\r\n x = dt_x\r\n self.dt_x = x\r\n if y is None:\r\n y = self.dt_y\r\n self.dt_y = y\r\n #print(f\"display_text: text:{text} x:{x}, y:{y}\")\r\n tu.penup()\r\n if y < self.disp_bottom + self.disp_boarder:\r\n continue_msg = \"Press ENTER to continue\"\r\n inp = input(continue_msg)\r\n self.clear_text() # Only option \r\n \r\n tu.goto(x,y)\r\n tu.pendown()\r\n \r\n tu.color(colr)\r\n font = (\"Arial\", size, \"normal\")\r\n #print(f\"colr:{colr} text:{text} font:{font}\")\r\n #print(f\"xcor():{tu.xcor()} ycor():{tu.ycor()}\")\r\n tu.write(text, align=\"left\", font=font)", "def draw(self):\n\n # Use update instead of update_idletasks because it works better\n # on some Windows machines.\n self.root.update()", "def input(self, event):\n k, font, lines = self._cursor, self.font, self._lines\n rect_w, txt, wraps = self.rect.w, self.txt, self._wraps\n image, bkg_img = self.image, self._image\n draw_line, index2line = self._draw_line, self._index2line\n pixel2index, update = self._pixel2index, self._update\n Line = _Line\n\n l = index2line(k)\n #update(l)\n\n \n if event.type == KEYDOWN:\n #l = index2line(k)\n\n # handle cursor navigation\n if event.key in DIRECTION_KEYS:\n draw_line(l)\n if event.key == K_UP:\n if l > 0:\n pixel = (font.size(''.join(txt[wraps[l]:k]))[0],\n lines[l - 1].rect.y)\n self._cursor = pixel2index(pixel)\n\n else: self._cursor = 0\n\n elif event.key == K_DOWN:\n if (len(wraps) - 1) > l:\n pixel = (font.size(''.join(txt[wraps[l]:k]))[0],\n lines[l + 1].rect.y)\n self._cursor = pixel2index(pixel)\n\n else: self._cursor = len(txt) - 1\n\n elif event.key == K_LEFT:\n self._cursor -= 1\n if self._cursor < 0: self._cursor = 0\n\n elif event.key == K_RIGHT:\n self._cursor += 1\n if self._cursor > (len(txt) - 1):\n self._cursor = len(txt) - 1\n\n # handle newlines\n elif event.key == K_RETURN:\n\n txt.insert(k, '\\n')\n self._cursor += 1\n lines.insert(l + 1, Line(Surface((0, 0)), Rect(0, 0, 0, 0)))\n update(l)\n\n # handle backspaces\n elif event.key == K_BACKSPACE:\n if k == 0: pass\n\n else:\n lines[l].clear(image, bkg_img)\n self._cursor -= 1\n k = self._cursor\n char = txt.pop(k)\n # if l> index2line(k) then the line no longer exists\n if l > index2line(k): del 
lines[l]\n if l == 0: update(l)\n else: update(l - 1)\n\n # handle ascii input\n else:\n if event.unicode:\n txt.insert(k, event.unicode.encode(\"ascii\"))\n self._cursor += 1\n if k == wraps[l]: # wrapped to a new line\n if l == 0: update(l) # update line\n else: update(l - 1) # else: update from previous line\n else: update(l)\n update(l)", "def draw(self):\n #for (x, y) in self.coords:\n # pyxel.rect(\n # (x + self.x) * 4,\n # (y + self.y) * 4,\n # (x + self.x) * 4 + 3,\n # (y + self.y) * 4 + 3,\n # self.color)", "def drawUI(self):\n cv2.rectangle(self.root, (0, self.height - 80), (self.width, self.height), (50, 50, 50), -1) # bar\n cv2.putText(self.root, 'Close with Q...', (20, self.height - 25), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255), 1, cv2.LINE_AA)", "def draw(self):\n if self.master != None :\n outline = Cell.FILLED_COLOR_BORDER if self.fill else Cell.EMPTY_COLOR_BORDER\n\n xmin = self.abs * self.size\n xmax = xmin + self.size\n ymin = self.ord * self.size\n ymax = ymin + self.size\n\n self.master.create_rectangle(xmin, ymin, xmax, ymax, fill = self.fill, outline = outline)", "def draw_text_image(self):\n\t\ttext_image = Dialog.draw_text_image(self)\n\t\tif(self.index/SCROLL_CONSTANT >= len(self.text)):\n\t\t\tarrow = Surface((12, 12)) #TEMP\n\t\t\ttext_image.blit(arrow, (4, 8 + 32 * (1 + self.select_index))) #TEMP VALUES\n\t\t\t#TODO: draw arrow pointing to currently selected option (currently just a black square)\n\t\treturn text_image", "def draw(self):\n self.vertex_list.draw(pyglet.gl.GL_QUADS)\n self.label.draw()", "def draw(self, force=False):\n self.display.draw(force)", "def draw(self, draw_surface):\n super().draw(draw_surface)\n if self.sub_event is not None:\n self.sub_event.draw(draw_surface)\n else:\n self.text_cursor.draw(draw_surface)", "def draw(self):\n if self.is_clicked:\n pg.draw.circle(self.window, self.color, (self.x, self.y), self.r, 0)\n else:\n pg.draw.circle(self.window, self.color, (self.x, self.y), self.r, 1)", "def draw(self, win):\n font = pygame.font.SysFont(\"comicsans\", 40)\n\n gap = self.width / 9\n x = self.col * gap\n y = self.row * gap\n\n if self.temp != 0 and self.value == 0:\n text = font.render(str(self.temp), 1, (128,128,128))\n win.blit(text, (x+5, y+5))\n elif not(self.value == 0):\n text = font.render(str(self.value), 1, (0, 0, 0))\n win.blit(text, (x + (gap/2 - text.get_width()/2), y + (gap/2 - text.get_height()/2)))\n\n if self.selected:\n pygame.draw.rect(win, (255,0,0), (x,y, gap ,gap), 3)", "def draw(self, ctx):\n self.set_size(self.width, self.available_height) \n #Drawing cell lines\n for i in range(0, (max(self.available_width,int(self.width)) / self.cell_width) + 1):\n ctx.move_to(i * self.cell_width, 0)\n ctx.line_to(i * self.cell_width, self.available_height)\n ctx.set_line_width(1)\n red = float(self.get_style().fg[gtk.STATE_INSENSITIVE].red) / 65535\n green = float(self.get_style().fg[gtk.STATE_INSENSITIVE].green) / 65535\n blue = float(self.get_style().fg[gtk.STATE_INSENSITIVE].blue) / 65535\n ctx.set_source_rgba(red, green, blue, 0.3)\n ctx.stroke()\n greatest = self.calculate_greatest() \n # Drawing scale lines\n step = greatest / 5\n ctx.save()\n ctx.set_dash([5],5)\n for i in range(int(step), int(greatest),5):\n ctx.move_to(0, self.available_height - (self.available_height - 20) * i / greatest)\n ctx.line_to(max(self.available_width,int(self.width)), self.available_height - (self.available_height - 20) * i / greatest)\n ctx.set_source_rgba(red,green,blue,0.3)\n ctx.stroke()\n\n ctx.restore()\n # Drawing the 
diagram\n loadingCopy = copy.deepcopy(self.loading)\n colorIndex = 0\n loadingKeys = loadingCopy.keys()\n loadingKeys.sort()\n for key in loadingKeys:\n while loadingCopy[key] != []:\n x1, y1 = loadingCopy[key].pop(0)\n if loadingCopy[key] != []:\n x2, y2 = loadingCopy[key][0]\n else:\n x2 = self.duration\n ctx.line_to (x1 * self.cell_width, self.available_height - (self.available_height - 20) * y1 / greatest)\n ctx.line_to (x2 * self.cell_width, self.available_height - (self.available_height - 20) * y1 / greatest)\n \n ctx.set_line_width(2)\n ctx.set_source_rgba(self.colors[colorIndex][0], self.colors[colorIndex][1], self.colors[colorIndex][2],0.5)\n ctx.stroke()\n colorIndex = (colorIndex + 1) % 11", "def draw( self ):\n\n if self.__drawnGrid == 0:\n draw_grid().draw()\n\n self.__drawnGrid = 1\n\n column = 0\n row = 0\n i = 0\n for mark in self.__grid:\n if row == 0:\n turtle.goto(-60+60*column, 60)\n elif row == 1:\n turtle.goto(-60+60*column, 0)\n elif row == 2:\n turtle.goto(-60+60*column, -60)\n\n if isinstance(mark, str):\n if mark.lower() == 'x': \n drawX(i)\n elif mark.lower() == 'o':\n drawO(i)\n\n column += 1\n\n if column == 3:\n column = 0\n row += 1\n\n i+=1\n\n turtle.goto(-60, 60)", "def draw(self, *args, **kwargs):\n self.window.clear()\n self.batch.draw()", "def draw(self):\n self.draw_occupied_cells()\n self.draw_open_cells()\n self.draw_edges()\n plt.xlabel(\"Red\")\n plt.ylabel(\"Black\")\n plt.title('Hex')\n self.camera.snap()", "def draw(self):\n # s1 = ShowPoint(self.cnv, self.p1.xpt, self.p1.ypt)\n # s2 = ShowPoint(self.cnv, self.p2.xpt, self.p2.ypt)\n # s1.draw()\n # # s2.draw()\n self.cnv.create_line(self.p1.xpt, self.p1.ypt, self.p2.xpt, self.p2.ypt)", "def on_draw(self, da, ctx):\n self.referee.get_current_state().draw(ctx)", "def _draw(self, canvas, options):\n pass # must override in subclass", "def paint(self, draw, x, y, w, h):\n\t\tpass", "def on_draw(self, ctx):\n gradient = cairo.LinearGradient(0, self.height * 2, 0, 0)\n gradient.add_color_stop_rgba(0.1, 0.1, 0.1, 0.1, 0.7)\n gradient.add_color_stop_rgba(0.1, 0.1, 0.1, 0.1, 0.75)\n ctx.set_source(gradient)\n self.draw_rectangle_advanced (ctx, self.pos[0], self.pos[1], self.__width - 20,\n self.__height - 280,\n rounded_angles=(5, 5, 5, 5),\n fill=True, border_size=1,\n border_color=(0, 0, 0, 0.25),\n shadow_size=10,\n shadow_color=(0, 0, 0, 0.25))\n # Make sure we have a pango layout initialized and updated.\n if self.p_layout == None :\n self.p_layout = ctx.create_layout()\n else:\n ctx.update_layout(self.p_layout)\n p_fdesc = pango.FontDescription()\n p_fdesc.set_family(\"Garuda\");\n p_fdesc.set_size(20 * pango.SCALE)\n self.p_layout.set_font_description(p_fdesc);\n pos = [(self.pos[0]+self.__width/2-40), self.pos[1]+5]\n ctx.set_source_rgb(1, 1, 1)\n x=0;\n self.__selected = None\n ctx.save()\n ctx.translate(*pos)\n txt = \"To-Do\";\n self.p_layout.set_markup('%s' % txt)\n ctx.show_layout(self.p_layout)\n ctx.restore()\n x += 1\n p_fdesc.set_family(\"Free Sans\");\n p_fdesc.set_size(10 * pango.SCALE)\n self.p_layout.set_font_description(p_fdesc);\n pos = [self.pos[0]+20, self.pos[1] + 60];\n self.__position = [];\n for item in self.__items:\n ctx.set_source(gradient);\n ctx.set_line_width (10);\n ctx.rectangle(self.pos[0]-20,pos[1]+4,7,7);\n ctx.fill();\n self.__position.append((pos[1]+4,item));\n self.draw_rectangle_advanced (ctx, self.pos[0], pos[1]-14, self.__width - 20,\n\t\t\t\t\t\t\t\t self.__height - (295),\n\t\t\t\t\t\t\t\t rounded_angles=(5, 5, 5, 5),\n\t\t\t\t\t\t\t\t fill=True, 
border_size=1,\n\t\t\t\t\t\t\t\t border_color=(0, 0, 0, 0.25),\n\t\t\t\t\t\t\t\t shadow_size=10,\n\t\t\t\t\t\t\t\t shadow_color=(0, 0, 0, 0.25))\n ctx.set_source_rgb(0.8,0.8,0.8);\n ctx.save()\n ctx.translate(*pos)\n self.p_layout.set_markup('%s' % item)\n ctx.show_layout(self.p_layout)\n pos[1] += 30\n ctx.restore()\n x += 1", "def draw(self):\n if self.master != None :\n fill = self.fill\n #fill = Cell.FILLED_COLOR_BG\n outline = Cell.EMPTY_COLOR_BORDER\n\n #if not self.fill:\n # fill = Cell.EMPTY_COLOR_BG\n # outline = Cell.EMPTY_COLOR_BORDER\n\n xmin = self.abs * self.size\n xmax = xmin + self.size\n ymin = self.ord * self.size\n ymax = ymin + self.size\n\n self.master.create_rectangle(xmin, ymin, xmax, ymax, fill = fill, outline = outline)", "def draw(self,ctx):\n step = self.greatest / 5\n # Drawing the scale\n ctx.set_source_color(self.get_style().fg[gtk.STATE_NORMAL])\n for i in range(int(step), int(self.greatest),5):\n x_bearing, y_bearing, txt_width, txt_height = ctx.text_extents(str(i))[:4]\n ctx.move_to(-10.5 - txt_width / 2 - x_bearing, self.available_height - (self.available_height - 20) * i / self.greatest - txt_height / 2 - y_bearing )\n\n ctx.show_text(str(i))", "def draw(self, win, outline=None):\n # Call this method to draw the button on the screen\n if outline:\n pygame.draw.rect(win, outline, (self.x - 2, self.y - 2, self.width + 4, self.height + 4), 0)\n\n pygame.draw.rect(win, self.color, (self.x, self.y, self.width, self.height), 0)\n\n if self.text != '':\n font = pygame.font.SysFont('comicsans', 30)\n text = font.render(self.text, 1, black)\n win.blit(text, (self.x + (self.width // 2 - text.get_width() // 2), self.y + (self.height // 2 - text.get_height() // 2)))", "def draw(self):\n arcade.draw_xywh_rectangle_filled(\n self.x, self.y, self.width, self.height, self.fill.color\n )\n arcade.draw_xywh_rectangle_outline(\n self.x, self.y, self.width, self.height, self.pen.color, 3\n )", "def draw_frame(self):\n if self.redraw:\n self.window.view.fill((0, 0, 0))\n self.menuview.fill(config.menubackcolor)\n\n columns = config.columns\n colwidth = self.textarea.get_width() / columns\n srow = self.selected % self.rows\n scol = self.selected / self.rows\n\n # adjust offset to within (columns) of col\n self.col_offset = min(scol, max(self.col_offset, scol - columns + 1))\n\n # render and blit each column of options that is showing\n # TODO: render all columns at init, and rerender only if font size or row count changes\n for c, col in enumerate(range(self.col_offset, columns)):\n opts = self.options[self.rows * col:self.rows * (col + 1)]\n opttext = self.font.render('\\n'.join(opt[0] for opt in opts),\n charheight=self.cheight, lineheight=self.rheight,\n tracking=1, color=config.menufontcolor)\n self.textarea.blit(opttext, (c * colwidth + self.cheight, 0))\n\n # blit marker\n mmargin = self.cheight / 4\n self.textarea.blit(self.marker, ((scol - self.col_offset) * colwidth + mmargin,\n srow * self.rheight + mmargin))\n\n self.redraw = False", "def draw(self):\n self.win.fill(BLACK)\n text_intro = \"\"\"Professor Marcelo gave you another bad grade.\\nBut not all is lost!\\nYou have just invaded his house and now have the chance to change your grade in the Professor's PC.\\nBut to have access you will need to answer an enigma.\\nBe quick, the Professor is coming from the theater in 5 minutes!\\nUse the arrow keys to move. Press SPACE to interact with the house objects. 
They may contain hints.\\nNow, press any key to start!\"\"\"\n \n self.messenger.draw_multiline_text(text_intro, self.font, 30, WHITE, (20, 0))", "def do_paint(self):\r\n curses.curs_set(0)\r\n if self.win:\r\n self.paint()\r\n self.done_paint()", "def draw_text(self, text, font, color, surface, x, y): #use for narrative in end sequence\n text_obj = font.render(text, True, color)\n text_rect = text_obj.get_rect()\n text_rect.center = (x, y)\n surface.blit(text_obj, text_rect)", "def draw(self, x, y):\r\n for w in self.widgets:\r\n if w.visible:\r\n w.draw()\r\n self.pointer.position(x + self.p_dx, y + self.p_dy, 0.5)\r\n self.pointer.draw()", "def draw(self):\r\n self.scr.fill(SCREEN_COLOR)\r\n self.label.draw()\r\n pygame.display.flip()", "def draw_key_selection(self, pane):\n\t\ty = self.option_index*(self.option_font_size + 8) + 2\n\t\tpoints = [ ( 296, y ), ( 380, y ), ( 380, y + 32 ), ( 296, y + 32 ) ]\n\t\tpygame.draw.lines( pane, RED , True, points, 2 )", "def draw(self):\n self.batch.draw()", "def drawPeekScrubber(self):\r\n self.c.delete(\"peekScrubber\")\r\n x = self.plot(self.peekTime,0)[0]\r\n self.c.create_line(x,0,x,self.h,fill=\"#666666\",tags=(\"peekScrubber\"))\r\n self.c.create_text(x+3,self.h-1,text=\"\", anchor = tk.SW,tags=(\"peekScrubberText\"))\r\n self.updatePeekScrubber()# Set text\r", "def draw(self):\n self.win.fill(BLACK)\n title1 = self.messenger.text_format(\"La casa\", self.font, 58, WHITE)\n title2 = self.messenger.text_format(\"de\", self.font, 48, WHITE)\n title3 = self.messenger.text_format(\"Marcelo\", self.font, 58, WHITE)\n\n title_rect1 = title1.get_rect()\n title_rect2 = title2.get_rect()\n title_rect3 = title3.get_rect()\n\n self.win.blit(title1, (WIDTH / 3.4 - (title_rect1[2] / 2), 90))\n pygame.draw.rect(self.win, RED, (238, 92, 45, 45))\n self.win.blit(title2, (WIDTH / 2 - (title_rect2[2] / 2), 95))\n self.win.blit(title3, (WIDTH / 1.4 - (title_rect3[2] / 2), 90))\n self.draw_menu(WHITE, WHITE)", "def update_editor ( self ):\n font = self.factory.to_wx_font( self )\n try:\n self._facename.SetStringSelection( font.GetFaceName() )\n except:\n self._facename.SetSelection( 0 )\n try:\n self._point_size.SetStringSelection( str( font.GetPointSize() ) )\n except:\n self._point_size.SetSelection( 0 )\n font.SetPointSize( min( 10, font.GetPointSize() ) )\n self._font.SetValue( self.str_value )\n self._font.SetFont( font )", "async def outline_text(draw_surface, coords, draw_text, font):\n draw = partial(draw_surface.text, text=draw_text, font=font,\n fill=\"black\")\n for offset_pair in product(range(-1, 2), repeat=2):\n draw((coords[0]+offset_pair[0], coords[1]+offset_pair[1]))\n draw(coords, fill=\"white\")", "def drawKeybind(position, text, anchor = \"center\"):\n\t\t# Draw the text\n\t\ttextObject = canvas.create_text(\n\t\t\tpixelFromPosition(Vector2(0, 0) + position),\n\t\t\ttext = text,\n\t\t\tfill = \"black\",\n\t\t\tfont = (\"Fixedsys\", InterfaceTools.fontSize(40), \"\"),\n\t\t\tanchor = anchor\n\t\t)\n\t\t# Get the bounding box of the text object in pixels\n\t\tboundingBox = canvas.bbox(textObject)\n\t\t# Convert the bounding box into position coordinates\n\t\tboundsMin = positionFromPixel(boundingBox[0], boundingBox[1])\n\t\tboundsMax = positionFromPixel(boundingBox[2], boundingBox[3])\n\t\tcenter = Vector2((boundsMin.x + boundsMax.x) / 2, (boundsMin.y + boundsMax.y) / 2)\n\t\t# Minimum rectangle size\n\t\tboundsMin.x = min(boundsMin.x - 0.05, -0.075 + center.x)\n\t\tboundsMin.y = min(boundsMin.y - 0.01, -0.075 + center.y)\n\t\tboundsMax.x = 
max(boundsMax.x + 0.05, 0.075 + center.x)\n\t\tboundsMax.y = max(boundsMax.y + 0.01, 0.075 + center.y)\n\t\t# Draw the rectangle\n\t\tcanvas.create_rectangle(\n\t\t\t*pixelFromPosition(boundsMin),\n\t\t\t*pixelFromPosition(boundsMax),\n\t\t\tfill = \"white\",\n\t\t\toutline = \"gray\",\n\t\t\twidth = InterfaceTools.imageScale(5)\n\t\t)\n\t\t# Position the text in front of the rectangle\n\t\tcanvas.tag_raise(textObject)", "def drawItemText(self, painter, rect, alignment, pal, enabled, text, text_role):\n if not text or text == '':\n return\n\n # Save our current pen if we need to\n saved_pen = None\n if text_role != QtGui.QPalette.NoRole:\n saved_pen = painter.pen()\n painter.setPen(QtGui.QPen(pal.brush(text_role), saved_pen.widthF()))\n\n # Render the text. There's a bit of voodoo here with the rectangles\n # and painter translation; there was various bits of finagling necessary\n # to get this to seem to work with both combo boxes and checkboxes.\n # There's probably better ways to be doing this.\n margin = 3\n painter.save()\n painter.translate(rect.left()-margin, 0)\n self.text_doc.setHtml(text)\n self.text_doc.setTextWidth(rect.width())\n self.text_doc.drawContents(painter,\n QtCore.QRectF(rect.adjusted(-rect.left(), 0, -margin, 0)))\n painter.restore()\n\n # Restore our previous pen if we need to\n if text_role != QtGui.QPalette.NoRole:\n painter.setPen(saved_pen)", "def draw(self, x, y, char=None, fg=(255, 255, 255), bg=None):\n self.console.draw_char(x, y, char, fg, bg)", "def draw(self, canvas):\n canvas.draw_text(\"Score: \" + str(self.__score), self.__pos, 25 , 'white', 'monospace')" ]
[ "0.7433061", "0.7251996", "0.72452176", "0.7009771", "0.69782376", "0.6925863", "0.6890717", "0.6733484", "0.66595197", "0.6654603", "0.6588631", "0.65804046", "0.65804046", "0.65804046", "0.65804046", "0.6565332", "0.65605956", "0.6522511", "0.650832", "0.64866877", "0.6470589", "0.638472", "0.63811016", "0.638008", "0.6324734", "0.629874", "0.6298205", "0.6273227", "0.6273227", "0.6273227", "0.62659705", "0.626012", "0.625816", "0.6250895", "0.6237254", "0.622533", "0.62074465", "0.61853945", "0.61773044", "0.61720437", "0.6166344", "0.6149943", "0.6138444", "0.6124652", "0.6114198", "0.6102355", "0.610152", "0.60997444", "0.6075406", "0.60693777", "0.60688835", "0.60449666", "0.60368735", "0.60368735", "0.6031647", "0.60231906", "0.60190195", "0.6005413", "0.5998347", "0.5993685", "0.5987327", "0.59835505", "0.59801733", "0.596943", "0.5948121", "0.59313077", "0.5928061", "0.59255165", "0.5915381", "0.59130156", "0.5908473", "0.59048253", "0.5900479", "0.58979154", "0.588472", "0.5879033", "0.58771765", "0.58668566", "0.5852679", "0.5838476", "0.5828917", "0.5826827", "0.5822783", "0.582033", "0.5810343", "0.58057886", "0.58050597", "0.57923985", "0.57798994", "0.57658017", "0.57647", "0.576129", "0.57605904", "0.57529813", "0.57365155", "0.5714576", "0.571279", "0.5710292", "0.57054937", "0.5694079" ]
0.72526735
1